from sklearn.metrics import classification_report, confusion_matrix, ConfusionMatrixDisplay
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.model_selection import train_test_split
from sklearn.datasets import make_blobs
import cv2
import gym
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import seaborn as sns
from tqdm import tqdm
import numpy as np
import matplotlib.cm as cm
from IPython import display as ipythondisplay
from tensorflow.keras import datasets, layers, models
from tensorflow.keras.layers import Input, UpSampling2D, Conv2D, Dense, Bidirectional, ZeroPadding2D,LeakyReLU, Reshape, Flatten, Dropout, Activation, BatchNormalization, LSTM, Embedding, GlobalAveragePooling2D, BatchNormalization, MaxPool2D, Conv2D, SpatialDropout2D
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint
from tensorflow.keras.preprocessing.sequence import TimeseriesGenerator
from tensorflow.keras.applications import ResNet50
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.datasets import cifar10, fashion_mnist
from tensorflow.keras.models import Model, load_model
from tensorflow.keras import Sequential
import tensorflow as tf
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.preprocessing import image
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.optimizers import Adam
import pickle
import os
from tensorflow import keras
from keras.utils.vis_utils import plot_model
a) $f(x_1, x_2) = (1-x_1)^2 + 100(x_2 - x_1^2)^2 \hspace{1cm} -10\leq x_1 \leq 10$ \, $-10\leq x_2 \leq 10$
# Part (a): Rosenbrock-style function sampled on a 100x100 grid over [-10, 10]^2.
x1, x2 = np.meshgrid(np.linspace(-10, 10, 100), np.linspace(-10, 10, 100))
z = (1 - x1) ** 2 + 100 * (x2 - x1 ** 2) ** 2
# Surface plot of f(x1, x2) for part (a).
fig, ax = plt.subplots(figsize=(10, 7), subplot_kw=dict(projection='3d'))
ax.plot_surface(x1, x2, z)
ax.set(xlabel='$x_1$', ylabel='$x_2$', zlabel='$f(x_1, x_2)$')
plt.tight_layout()
plt.show()
# Hold out 20% of the sampled grid points as a test set (fixed seed for
# reproducibility). Features are the (x1, x2) pairs, targets the f values.
features = np.vstack([x1.flatten(), x2.flatten()]).T
targets = z.flatten()
x_train, x_test, y_train, y_test = train_test_split(
    features, targets, test_size=0.2, random_state=505
)
# Visualise the train/test partition (after the split) over the true surface.
fig, ax = plt.subplots(figsize=(10, 7), subplot_kw=dict(projection='3d'))
ax.plot_wireframe(x1, x2, z, linewidths=0.5, color='lightgrey')
ax.scatter(x_train[:, 0], x_train[:, 1], y_train, s=1, color='darkorange', label='Training data')
ax.scatter(x_test[:, 0], x_test[:, 1], y_test, s=5, color='darkgreen', label='Test data')
ax.set(xlabel='$x_1$', ylabel='$x_2$', zlabel='$f(x_1, x_2)$')
plt.legend()
plt.tight_layout()
plt.show()
# Fully-connected regression network: 2 inputs, tapering ReLU hidden layers,
# one linear output. Trained with plain MSE.
mlp = Sequential()
mlp.add(Dense(64, activation='relu', input_shape=(2,)))
mlp.add(Dense(32, activation='relu'))
mlp.add(Dense(16, activation='relu'))
mlp.add(Dense(8, activation='relu'))
mlp.add(Dense(4, activation='relu'))
mlp.add(Dense(1, activation='linear'))
mlp.compile(loss='mean_squared_error', optimizer='adam')
mlp.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense (Dense) (None, 64) 192
dense_1 (Dense) (None, 32) 2080
dense_2 (Dense) (None, 16) 528
dense_3 (Dense) (None, 8) 136
dense_4 (Dense) (None, 4) 36
dense_5 (Dense) (None, 1) 5
=================================================================
Total params: 2,977
Trainable params: 2,977
Non-trainable params: 0
_________________________________________________________________
# Fit the MLP: stop when validation loss stalls for 10 epochs, and decay the
# learning rate tenfold after 5 flat epochs (floor 1e-4).
stop_early = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)
decay_lr = tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=5, min_lr=0.0001)
history = mlp.fit(
    x_train, y_train,
    batch_size=8,
    epochs=100,
    validation_split=0.1,
    callbacks=[stop_early, decay_lr]
)
Epoch 1/100 900/900 [==============================] - 5s 3ms/step - loss: 72607186944.0000 - val_loss: 33732118528.0000 - lr: 0.0010 Epoch 2/100 900/900 [==============================] - 2s 2ms/step - loss: 32486748160.0000 - val_loss: 28747450368.0000 - lr: 0.0010 Epoch 3/100 900/900 [==============================] - 2s 3ms/step - loss: 27862544384.0000 - val_loss: 24206911488.0000 - lr: 0.0010 Epoch 4/100 900/900 [==============================] - 2s 3ms/step - loss: 21649389568.0000 - val_loss: 18433220608.0000 - lr: 0.0010 Epoch 5/100 900/900 [==============================] - 2s 2ms/step - loss: 15748280320.0000 - val_loss: 14110245888.0000 - lr: 0.0010 Epoch 6/100 900/900 [==============================] - 2s 2ms/step - loss: 11427734528.0000 - val_loss: 10153138176.0000 - lr: 0.0010 Epoch 7/100 900/900 [==============================] - 2s 3ms/step - loss: 7851578880.0000 - val_loss: 6819923456.0000 - lr: 0.0010 Epoch 8/100 900/900 [==============================] - 2s 3ms/step - loss: 4933043200.0000 - val_loss: 4014416896.0000 - lr: 0.0010 Epoch 9/100 900/900 [==============================] - 2s 3ms/step - loss: 3069329408.0000 - val_loss: 2642709760.0000 - lr: 0.0010 Epoch 10/100 900/900 [==============================] - 2s 3ms/step - loss: 2414481664.0000 - val_loss: 2180010496.0000 - lr: 0.0010 Epoch 11/100 900/900 [==============================] - 2s 3ms/step - loss: 2167960320.0000 - val_loss: 2112446848.0000 - lr: 0.0010 Epoch 12/100 900/900 [==============================] - 2s 3ms/step - loss: 2042339712.0000 - val_loss: 2028345728.0000 - lr: 0.0010 Epoch 13/100 900/900 [==============================] - 2s 3ms/step - loss: 1978681600.0000 - val_loss: 1861420288.0000 - lr: 0.0010 Epoch 14/100 900/900 [==============================] - 2s 3ms/step - loss: 1912548992.0000 - val_loss: 1824831616.0000 - lr: 0.0010 Epoch 15/100 900/900 [==============================] - 2s 3ms/step - loss: 1831523200.0000 - val_loss: 1712696320.0000 - lr: 0.0010 
Epoch 16/100 900/900 [==============================] - 2s 3ms/step - loss: 1709860864.0000 - val_loss: 1559550848.0000 - lr: 0.0010 Epoch 17/100 900/900 [==============================] - 3s 3ms/step - loss: 1565906944.0000 - val_loss: 1632134784.0000 - lr: 0.0010 Epoch 18/100 900/900 [==============================] - 2s 3ms/step - loss: 1385251456.0000 - val_loss: 1247204224.0000 - lr: 0.0010 Epoch 19/100 900/900 [==============================] - 2s 3ms/step - loss: 1166907392.0000 - val_loss: 1100110208.0000 - lr: 0.0010 Epoch 20/100 900/900 [==============================] - 2s 3ms/step - loss: 1004187968.0000 - val_loss: 967076416.0000 - lr: 0.0010 Epoch 21/100 900/900 [==============================] - 2s 3ms/step - loss: 810782720.0000 - val_loss: 773935616.0000 - lr: 0.0010 Epoch 22/100 900/900 [==============================] - 2s 3ms/step - loss: 674224960.0000 - val_loss: 632815232.0000 - lr: 0.0010 Epoch 23/100 900/900 [==============================] - 4s 5ms/step - loss: 489579744.0000 - val_loss: 362423840.0000 - lr: 0.0010 Epoch 24/100 900/900 [==============================] - 2s 3ms/step - loss: 315362848.0000 - val_loss: 252339072.0000 - lr: 0.0010 Epoch 25/100 900/900 [==============================] - 2s 3ms/step - loss: 229681856.0000 - val_loss: 224926256.0000 - lr: 0.0010 Epoch 26/100 900/900 [==============================] - 2s 3ms/step - loss: 186903232.0000 - val_loss: 145701696.0000 - lr: 0.0010 Epoch 27/100 900/900 [==============================] - 3s 3ms/step - loss: 153437984.0000 - val_loss: 115772624.0000 - lr: 0.0010 Epoch 28/100 900/900 [==============================] - 2s 3ms/step - loss: 116763624.0000 - val_loss: 98385544.0000 - lr: 0.0010 Epoch 29/100 900/900 [==============================] - 2s 3ms/step - loss: 97089600.0000 - val_loss: 83102952.0000 - lr: 0.0010 Epoch 30/100 900/900 [==============================] - 2s 3ms/step - loss: 77944424.0000 - val_loss: 61520092.0000 - lr: 0.0010 Epoch 31/100 900/900 
[==============================] - 2s 3ms/step - loss: 72876184.0000 - val_loss: 66571344.0000 - lr: 0.0010 Epoch 32/100 900/900 [==============================] - 2s 3ms/step - loss: 75771832.0000 - val_loss: 425577088.0000 - lr: 0.0010 Epoch 33/100 900/900 [==============================] - 2s 3ms/step - loss: 53201588.0000 - val_loss: 41077088.0000 - lr: 0.0010 Epoch 34/100 900/900 [==============================] - 2s 3ms/step - loss: 55451924.0000 - val_loss: 58675948.0000 - lr: 0.0010 Epoch 35/100 900/900 [==============================] - 2s 3ms/step - loss: 57983348.0000 - val_loss: 40298696.0000 - lr: 0.0010 Epoch 36/100 900/900 [==============================] - 2s 3ms/step - loss: 47533740.0000 - val_loss: 35508004.0000 - lr: 0.0010 Epoch 37/100 900/900 [==============================] - 2s 3ms/step - loss: 52479132.0000 - val_loss: 29065432.0000 - lr: 0.0010 Epoch 38/100 900/900 [==============================] - 2s 3ms/step - loss: 44108308.0000 - val_loss: 55163960.0000 - lr: 0.0010 Epoch 39/100 900/900 [==============================] - 2s 3ms/step - loss: 47892492.0000 - val_loss: 30585124.0000 - lr: 0.0010 Epoch 40/100 900/900 [==============================] - 2s 3ms/step - loss: 40503640.0000 - val_loss: 20862834.0000 - lr: 0.0010 Epoch 41/100 900/900 [==============================] - 2s 3ms/step - loss: 39890224.0000 - val_loss: 28501514.0000 - lr: 0.0010 Epoch 42/100 900/900 [==============================] - 2s 3ms/step - loss: 53116700.0000 - val_loss: 22040958.0000 - lr: 0.0010 Epoch 43/100 900/900 [==============================] - 2s 3ms/step - loss: 31399222.0000 - val_loss: 21801388.0000 - lr: 0.0010 Epoch 44/100 900/900 [==============================] - 2s 3ms/step - loss: 39736132.0000 - val_loss: 20172130.0000 - lr: 0.0010 Epoch 45/100 900/900 [==============================] - 2s 3ms/step - loss: 45849820.0000 - val_loss: 18307038.0000 - lr: 0.0010 Epoch 46/100 900/900 [==============================] - 2s 3ms/step - loss: 
34069212.0000 - val_loss: 28955868.0000 - lr: 0.0010 Epoch 47/100 900/900 [==============================] - 2s 3ms/step - loss: 34465424.0000 - val_loss: 20930066.0000 - lr: 0.0010 Epoch 48/100 900/900 [==============================] - 2s 3ms/step - loss: 44199100.0000 - val_loss: 71792760.0000 - lr: 0.0010 Epoch 49/100 900/900 [==============================] - 2s 3ms/step - loss: 33966268.0000 - val_loss: 47495984.0000 - lr: 0.0010 Epoch 50/100 900/900 [==============================] - 2s 3ms/step - loss: 46753780.0000 - val_loss: 34930740.0000 - lr: 0.0010 Epoch 51/100 900/900 [==============================] - 3s 3ms/step - loss: 14294411.0000 - val_loss: 14608778.0000 - lr: 1.0000e-04 Epoch 52/100 900/900 [==============================] - 2s 3ms/step - loss: 14058916.0000 - val_loss: 13611569.0000 - lr: 1.0000e-04 Epoch 53/100 900/900 [==============================] - 2s 3ms/step - loss: 14115471.0000 - val_loss: 14265434.0000 - lr: 1.0000e-04 Epoch 54/100 900/900 [==============================] - 2s 3ms/step - loss: 13966475.0000 - val_loss: 13110502.0000 - lr: 1.0000e-04 Epoch 55/100 900/900 [==============================] - 3s 3ms/step - loss: 13850912.0000 - val_loss: 13086548.0000 - lr: 1.0000e-04 Epoch 56/100 900/900 [==============================] - 2s 3ms/step - loss: 14181619.0000 - val_loss: 13432060.0000 - lr: 1.0000e-04 Epoch 57/100 900/900 [==============================] - 2s 3ms/step - loss: 13733278.0000 - val_loss: 15846159.0000 - lr: 1.0000e-04 Epoch 58/100 900/900 [==============================] - 2s 3ms/step - loss: 13828762.0000 - val_loss: 12763407.0000 - lr: 1.0000e-04 Epoch 59/100 900/900 [==============================] - 2s 3ms/step - loss: 13431568.0000 - val_loss: 12960543.0000 - lr: 1.0000e-04 Epoch 60/100 900/900 [==============================] - 3s 3ms/step - loss: 13620694.0000 - val_loss: 12749395.0000 - lr: 1.0000e-04 Epoch 61/100 900/900 [==============================] - 3s 3ms/step - loss: 13291422.0000 - 
val_loss: 13369820.0000 - lr: 1.0000e-04 Epoch 62/100 900/900 [==============================] - 2s 3ms/step - loss: 13256623.0000 - val_loss: 12930989.0000 - lr: 1.0000e-04 Epoch 63/100 900/900 [==============================] - 2s 3ms/step - loss: 13111728.0000 - val_loss: 12626547.0000 - lr: 1.0000e-04 Epoch 64/100 900/900 [==============================] - 2s 3ms/step - loss: 13106680.0000 - val_loss: 12028378.0000 - lr: 1.0000e-04 Epoch 65/100 900/900 [==============================] - 3s 3ms/step - loss: 12745654.0000 - val_loss: 14885505.0000 - lr: 1.0000e-04 Epoch 66/100 900/900 [==============================] - 2s 3ms/step - loss: 12938581.0000 - val_loss: 12747204.0000 - lr: 1.0000e-04 Epoch 67/100 900/900 [==============================] - 2s 3ms/step - loss: 12610334.0000 - val_loss: 12144410.0000 - lr: 1.0000e-04 Epoch 68/100 900/900 [==============================] - 2s 3ms/step - loss: 12741362.0000 - val_loss: 11969633.0000 - lr: 1.0000e-04 Epoch 69/100 900/900 [==============================] - 2s 3ms/step - loss: 12679231.0000 - val_loss: 13138680.0000 - lr: 1.0000e-04 Epoch 70/100 900/900 [==============================] - 2s 3ms/step - loss: 12373116.0000 - val_loss: 13003579.0000 - lr: 1.0000e-04 Epoch 71/100 900/900 [==============================] - 3s 3ms/step - loss: 12557367.0000 - val_loss: 14569577.0000 - lr: 1.0000e-04 Epoch 72/100 900/900 [==============================] - 3s 3ms/step - loss: 12270858.0000 - val_loss: 11363238.0000 - lr: 1.0000e-04 Epoch 73/100 900/900 [==============================] - 2s 3ms/step - loss: 12382605.0000 - val_loss: 12228745.0000 - lr: 1.0000e-04 Epoch 74/100 900/900 [==============================] - 2s 3ms/step - loss: 11774753.0000 - val_loss: 11583488.0000 - lr: 1.0000e-04 Epoch 75/100 900/900 [==============================] - 3s 3ms/step - loss: 12066925.0000 - val_loss: 13405797.0000 - lr: 1.0000e-04 Epoch 76/100 900/900 [==============================] - 2s 3ms/step - loss: 12046363.0000 - 
val_loss: 11316186.0000 - lr: 1.0000e-04 Epoch 77/100 900/900 [==============================] - 2s 3ms/step - loss: 11822198.0000 - val_loss: 12414644.0000 - lr: 1.0000e-04 Epoch 78/100 900/900 [==============================] - 2s 3ms/step - loss: 11812789.0000 - val_loss: 11596154.0000 - lr: 1.0000e-04 Epoch 79/100 900/900 [==============================] - 2s 3ms/step - loss: 11484913.0000 - val_loss: 11662296.0000 - lr: 1.0000e-04 Epoch 80/100 900/900 [==============================] - 2s 3ms/step - loss: 11606447.0000 - val_loss: 10772442.0000 - lr: 1.0000e-04 Epoch 81/100 900/900 [==============================] - 2s 3ms/step - loss: 11493557.0000 - val_loss: 12126102.0000 - lr: 1.0000e-04 Epoch 82/100 900/900 [==============================] - 3s 3ms/step - loss: 11349547.0000 - val_loss: 11174807.0000 - lr: 1.0000e-04 Epoch 83/100 900/900 [==============================] - 2s 3ms/step - loss: 11343238.0000 - val_loss: 14066624.0000 - lr: 1.0000e-04 Epoch 84/100 900/900 [==============================] - 3s 3ms/step - loss: 11355988.0000 - val_loss: 10877719.0000 - lr: 1.0000e-04 Epoch 85/100 900/900 [==============================] - 2s 3ms/step - loss: 11314851.0000 - val_loss: 12046778.0000 - lr: 1.0000e-04 Epoch 86/100 900/900 [==============================] - 2s 3ms/step - loss: 11047119.0000 - val_loss: 10231346.0000 - lr: 1.0000e-04 Epoch 87/100 900/900 [==============================] - 2s 3ms/step - loss: 11070084.0000 - val_loss: 11885878.0000 - lr: 1.0000e-04 Epoch 88/100 900/900 [==============================] - 3s 3ms/step - loss: 10950543.0000 - val_loss: 11589425.0000 - lr: 1.0000e-04 Epoch 89/100 900/900 [==============================] - 2s 3ms/step - loss: 10752402.0000 - val_loss: 12474972.0000 - lr: 1.0000e-04 Epoch 90/100 900/900 [==============================] - 3s 3ms/step - loss: 10876344.0000 - val_loss: 11175237.0000 - lr: 1.0000e-04 Epoch 91/100 900/900 [==============================] - 2s 3ms/step - loss: 11062686.0000 - 
val_loss: 10378333.0000 - lr: 1.0000e-04 Epoch 92/100 900/900 [==============================] - 3s 3ms/step - loss: 10724464.0000 - val_loss: 13879649.0000 - lr: 1.0000e-04 Epoch 93/100 900/900 [==============================] - 3s 3ms/step - loss: 10606362.0000 - val_loss: 10037036.0000 - lr: 1.0000e-04 Epoch 94/100 900/900 [==============================] - 3s 3ms/step - loss: 10667357.0000 - val_loss: 12152709.0000 - lr: 1.0000e-04 Epoch 95/100 900/900 [==============================] - 2s 3ms/step - loss: 10498846.0000 - val_loss: 11792492.0000 - lr: 1.0000e-04 Epoch 96/100 900/900 [==============================] - 2s 3ms/step - loss: 10499864.0000 - val_loss: 9957984.0000 - lr: 1.0000e-04 Epoch 97/100 900/900 [==============================] - 2s 3ms/step - loss: 10567239.0000 - val_loss: 11616033.0000 - lr: 1.0000e-04 Epoch 98/100 900/900 [==============================] - 2s 3ms/step - loss: 10467988.0000 - val_loss: 10123821.0000 - lr: 1.0000e-04 Epoch 99/100 900/900 [==============================] - 3s 3ms/step - loss: 10341040.0000 - val_loss: 10067432.0000 - lr: 1.0000e-04 Epoch 100/100 900/900 [==============================] - 3s 3ms/step - loss: 10711256.0000 - val_loss: 11243690.0000 - lr: 1.0000e-04
# Learning curves: training vs. validation loss per epoch.
fig, ax = plt.subplots(figsize=(10, 7))
for key, lbl in (('loss', 'Training Loss'), ('val_loss', 'Validation Loss')):
    ax.plot(history.history[key], label=lbl)
ax.set(title='Processo de otimização das funções de perda', ylabel='Loss', xlabel='Epoch')
plt.legend()
plt.tight_layout()
plt.show()
# Compare the model's predictions against the held-out test targets on top of
# the true surface.
fig, ax = plt.subplots(figsize=(10, 7), subplot_kw=dict(projection='3d'))
ax.plot_wireframe(x1, x2, z, linewidths=0.5, color='lightgrey')
# BUG FIX: the legend labels were wrong — the first scatter shows the *test*
# targets (was labelled 'Training data') and the second shows the model's
# predictions (was labelled 'Test data').
ax.scatter(x_test[:, 0], x_test[:, 1], y_test, s=14, color='C0', label='Test data')
ax.scatter(x_test[:, 0], x_test[:, 1], mlp.predict(x_test), s=15, marker='^', color='C1', label='Prediction')
ax.set(
    xlabel='$x_1$',
    ylabel='$x_2$',
    zlabel='$f(x_1, x_2)$'
)
plt.legend()
plt.tight_layout()
plt.show()
63/63 [==============================] - 0s 2ms/step
b) $f(x_1, x_2) = x_1^2 + x_2^2 + 2x_1x_2 \cos(\pi x_1 x_2) + x_1 + x_2 - 1 \hspace{1cm} |x_1| \leq 1, \, |x_2| \leq 1$
# Part (b): f(x1, x2) = x1^2 + x2^2 + 2*x1*x2*cos(pi*x1*x2) + x1 + x2 - 1
# sampled on a 100x100 grid over [-1, 1]^2.
grid = np.linspace(-1, 1, 100)
x1, x2 = np.meshgrid(grid, grid)
z = x1 ** 2 + x2 ** 2 + 2 * x1 * x2 * np.cos(np.pi * x1 * x2) + x1 + x2 - 1
# Surface plot of f(x1, x2) for part (b).
fig, ax = plt.subplots(figsize=(10, 7), subplot_kw=dict(projection='3d'))
ax.plot_surface(x1, x2, z)
ax.set(xlabel='$x_1$', ylabel='$x_2$', zlabel='$f(x_1, x_2)$')
plt.tight_layout()
plt.show()
# 80/20 train/test split of the part-(b) surface samples (same seed as part a).
xy_pairs = np.column_stack([x1.ravel(), x2.ravel()])
x_train, x_test, y_train, y_test = train_test_split(
    xy_pairs,
    z.ravel(),
    test_size=0.2,
    random_state=505
)
# Show how the split covers the part-(b) surface (plotted after splitting).
fig, ax = plt.subplots(figsize=(10, 7), subplot_kw=dict(projection='3d'))
ax.plot_wireframe(x1, x2, z, linewidths=0.5, color='lightgrey')
ax.scatter(x_train[:, 0], x_train[:, 1], y_train, s=1, color='darkorange', label='Training data')
ax.scatter(x_test[:, 0], x_test[:, 1], y_test, s=5, color='darkgreen', label='Test data')
ax.set_xlabel('$x_1$')
ax.set_ylabel('$x_2$')
ax.set_zlabel('$f(x_1, x_2)$')
plt.legend()
plt.tight_layout()
plt.show()
# Same MLP architecture as in part (a): 2 inputs -> tapering ReLU stack -> 1
# linear output, trained with MSE.
hidden_widths = [64, 32, 16, 8, 4]
stack = [Dense(hidden_widths[0], activation='relu', input_shape=(2,))]
stack += [Dense(w, activation='relu') for w in hidden_widths[1:]]
stack.append(Dense(1, activation='linear'))
mlp = Sequential(stack)
mlp.compile(loss='mean_squared_error', optimizer='adam')
mlp.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense_6 (Dense) (None, 64) 192
dense_7 (Dense) (None, 32) 2080
dense_8 (Dense) (None, 16) 528
dense_9 (Dense) (None, 8) 136
dense_10 (Dense) (None, 4) 36
dense_11 (Dense) (None, 1) 5
=================================================================
Total params: 2,977
Trainable params: 2,977
Non-trainable params: 0
_________________________________________________________________
# Train on the part-(b) data with the same early-stopping / LR-decay setup.
training_callbacks = [
    tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10),
    tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=5, min_lr=0.0001),
]
history = mlp.fit(
    x_train, y_train,
    batch_size=8,
    epochs=100,
    validation_split=0.1,
    callbacks=training_callbacks
)
Epoch 1/100 900/900 [==============================] - 3s 3ms/step - loss: 0.1518 - val_loss: 0.0380 - lr: 0.0010 Epoch 2/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0230 - val_loss: 0.0202 - lr: 0.0010 Epoch 3/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0119 - val_loss: 0.0107 - lr: 0.0010 Epoch 4/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0061 - val_loss: 0.0037 - lr: 0.0010 Epoch 5/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0049 - val_loss: 0.0021 - lr: 0.0010 Epoch 6/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0047 - val_loss: 8.8958e-04 - lr: 0.0010 Epoch 7/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0022 - val_loss: 0.0017 - lr: 0.0010 Epoch 8/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0037 - val_loss: 0.0038 - lr: 0.0010 Epoch 9/100 900/900 [==============================] - 3s 3ms/step - loss: 0.0023 - val_loss: 6.6980e-04 - lr: 0.0010 Epoch 10/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0018 - val_loss: 0.0017 - lr: 0.0010 Epoch 11/100 900/900 [==============================] - 3s 3ms/step - loss: 0.0031 - val_loss: 7.6547e-04 - lr: 0.0010 Epoch 12/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0020 - val_loss: 0.0051 - lr: 0.0010 Epoch 13/100 900/900 [==============================] - 3s 3ms/step - loss: 0.0021 - val_loss: 9.9872e-04 - lr: 0.0010 Epoch 14/100 900/900 [==============================] - 2s 3ms/step - loss: 0.0017 - val_loss: 0.0046 - lr: 0.0010 Epoch 15/100 900/900 [==============================] - 2s 3ms/step - loss: 2.7343e-04 - val_loss: 2.3635e-04 - lr: 1.0000e-04 Epoch 16/100 900/900 [==============================] - 3s 3ms/step - loss: 1.6806e-04 - val_loss: 2.0181e-04 - lr: 1.0000e-04 Epoch 17/100 900/900 [==============================] - 3s 3ms/step - loss: 1.5988e-04 - val_loss: 2.3717e-04 - lr: 1.0000e-04 
Epoch 18/100 900/900 [==============================] - 2s 3ms/step - loss: 1.6713e-04 - val_loss: 2.4845e-04 - lr: 1.0000e-04 Epoch 19/100 900/900 [==============================] - 2s 3ms/step - loss: 1.6860e-04 - val_loss: 3.0091e-04 - lr: 1.0000e-04 Epoch 20/100 900/900 [==============================] - 2s 3ms/step - loss: 1.5816e-04 - val_loss: 1.9675e-04 - lr: 1.0000e-04 Epoch 21/100 900/900 [==============================] - 2s 3ms/step - loss: 1.6376e-04 - val_loss: 1.3812e-04 - lr: 1.0000e-04 Epoch 22/100 900/900 [==============================] - 2s 3ms/step - loss: 1.4988e-04 - val_loss: 1.5819e-04 - lr: 1.0000e-04 Epoch 23/100 900/900 [==============================] - 2s 3ms/step - loss: 1.4492e-04 - val_loss: 1.5283e-04 - lr: 1.0000e-04 Epoch 24/100 900/900 [==============================] - 2s 3ms/step - loss: 1.4855e-04 - val_loss: 1.4488e-04 - lr: 1.0000e-04 Epoch 25/100 900/900 [==============================] - 2s 3ms/step - loss: 1.3759e-04 - val_loss: 1.5664e-04 - lr: 1.0000e-04 Epoch 26/100 900/900 [==============================] - 2s 3ms/step - loss: 1.2803e-04 - val_loss: 1.2339e-04 - lr: 1.0000e-04 Epoch 27/100 900/900 [==============================] - 2s 3ms/step - loss: 1.2851e-04 - val_loss: 1.4287e-04 - lr: 1.0000e-04 Epoch 28/100 900/900 [==============================] - 2s 3ms/step - loss: 1.3407e-04 - val_loss: 1.2989e-04 - lr: 1.0000e-04 Epoch 29/100 900/900 [==============================] - 2s 3ms/step - loss: 1.2344e-04 - val_loss: 1.7907e-04 - lr: 1.0000e-04 Epoch 30/100 900/900 [==============================] - 2s 3ms/step - loss: 1.3005e-04 - val_loss: 1.0765e-04 - lr: 1.0000e-04 Epoch 31/100 900/900 [==============================] - 2s 3ms/step - loss: 1.2049e-04 - val_loss: 1.4330e-04 - lr: 1.0000e-04 Epoch 32/100 900/900 [==============================] - 2s 3ms/step - loss: 1.2743e-04 - val_loss: 1.1965e-04 - lr: 1.0000e-04 Epoch 33/100 900/900 [==============================] - 2s 3ms/step - loss: 1.0889e-04 - 
val_loss: 1.1838e-04 - lr: 1.0000e-04 Epoch 34/100 900/900 [==============================] - 2s 3ms/step - loss: 1.0991e-04 - val_loss: 1.0851e-04 - lr: 1.0000e-04 Epoch 35/100 900/900 [==============================] - 2s 3ms/step - loss: 1.1393e-04 - val_loss: 1.0683e-04 - lr: 1.0000e-04 Epoch 36/100 900/900 [==============================] - 2s 3ms/step - loss: 1.1497e-04 - val_loss: 1.2999e-04 - lr: 1.0000e-04 Epoch 37/100 900/900 [==============================] - 2s 3ms/step - loss: 1.0718e-04 - val_loss: 1.3468e-04 - lr: 1.0000e-04 Epoch 38/100 900/900 [==============================] - 3s 3ms/step - loss: 9.9634e-05 - val_loss: 1.1511e-04 - lr: 1.0000e-04 Epoch 39/100 900/900 [==============================] - 2s 3ms/step - loss: 1.2144e-04 - val_loss: 1.2099e-04 - lr: 1.0000e-04 Epoch 40/100 900/900 [==============================] - 2s 3ms/step - loss: 1.0714e-04 - val_loss: 2.0053e-04 - lr: 1.0000e-04 Epoch 41/100 900/900 [==============================] - 2s 3ms/step - loss: 1.1940e-04 - val_loss: 2.1926e-04 - lr: 1.0000e-04 Epoch 42/100 900/900 [==============================] - 3s 3ms/step - loss: 1.0226e-04 - val_loss: 1.2323e-04 - lr: 1.0000e-04 Epoch 43/100 900/900 [==============================] - 2s 3ms/step - loss: 1.0285e-04 - val_loss: 1.1406e-04 - lr: 1.0000e-04 Epoch 44/100 900/900 [==============================] - 2s 3ms/step - loss: 1.0296e-04 - val_loss: 1.1210e-04 - lr: 1.0000e-04 Epoch 45/100 900/900 [==============================] - 2s 3ms/step - loss: 1.0196e-04 - val_loss: 1.3659e-04 - lr: 1.0000e-04
# Plot the optimisation history for part (b).
fig, ax = plt.subplots(figsize=(10, 7))
ax.plot(history.history['loss'], label='Training Loss')
ax.plot(history.history['val_loss'], label='Validation Loss')
ax.set_title('Processo de otimização das funções de perda')
ax.set_ylabel('Loss')
ax.set_xlabel('Epoch')
plt.legend()
plt.tight_layout()
plt.show()
# Predictions vs. held-out test targets for part (b), over the true surface.
fig, ax = plt.subplots(figsize=(10, 7), subplot_kw=dict(projection='3d'))
ax.plot_wireframe(x1, x2, z, linewidths=0.5, color='lightgrey')
# BUG FIX: legend labels corrected — first scatter is the test targets (was
# 'Training data'), second is the model prediction (was 'Test data').
ax.scatter(x_test[:, 0], x_test[:, 1], y_test, s=14, color='C0', label='Test data')
ax.scatter(x_test[:, 0], x_test[:, 1], mlp.predict(x_test), s=15, marker='^', color='C1', label='Prediction')
ax.set(
    xlabel='$x_1$',
    ylabel='$x_2$',
    zlabel='$f(x_1, x_2)$'
)
plt.legend()
plt.tight_layout()
plt.show()
63/63 [==============================] - 0s 907us/step
# classes
def c1(x, y):
    """Return 1 if (x, y) lies inside the unit circle centred at (-1, 0), else 0.

    FIX: the original tested `expr <= 1 == True`, a chained comparison that
    evaluates as `(expr <= 1) and (1 == True)` — it only worked because
    `1 == True` in Python. The redundant `== True` is removed.
    """
    return 1 if (x + 1)**2 + y**2 <= 1 else 0
def c2(x, y):
    """Return 1 if (x, y) lies inside the unit circle centred at (1, 0), else 0.

    FIX: dropped the redundant `== True` from the original chained comparison
    (`expr <= 1 == True`), which worked only by accident.
    """
    return 1 if (x - 1)**2 + y**2 <= 1 else 0
def c3(x, y):
    """Return 1 if (x, y) lies inside the unit circle centred at (0, -1), else 0.

    FIX: dropped the redundant `== True` from the original chained comparison
    (`expr <= 1 == True`), which worked only by accident.
    """
    return 1 if x**2 + (y + 1)**2 <= 1 else 0
def c4(x, y):
    """Return 1 if (x, y) lies inside the unit circle centred at (0, 1), else 0.

    FIX: dropped the redundant `== True` from the original chained comparison
    (`expr <= 1 == True`), which worked only by accident.
    """
    return 1 if x**2 + (y - 1)**2 <= 1 else 0
# Build the 2-D point grid and label each point by which pair of circles
# (c1..c4) contains it; points outside every overlap region get class 0.
x, y = np.meshgrid(np.linspace(-1, 1, 100), np.linspace(-1, 1, 100))
points = np.column_stack([x.ravel(), y.ravel()])
def _quadrant_label(px, py):
    # Condition order matches the original if/elif chain exactly.
    if c1(px, py) and c3(px, py):   # third quadrant lens
        return 1
    if c2(px, py) and c4(px, py):   # first quadrant lens
        return 2
    if c3(px, py) and c2(px, py):   # fourth quadrant lens
        return 3
    if c1(px, py) and c4(px, py):   # second quadrant lens
        return 4
    return 0
labels = np.array([_quadrant_label(px, py) for px, py in points])
1 in labels
True
2 in labels
True
3 in labels
True
4 in labels
True
3 in labels
True
# Stratified 80/20 split, then plot the full dataset and the two partitions
# side by side, colouring points by class.
x_train, x_test, y_train, y_test = train_test_split(points, labels, test_size=0.2, stratify=labels)
cmap = np.array([(3/255, 200/255, 133/255),
                 (253/255, 250/255, 57/255),
                 (1/255, 225/255, 218/255),
                 (128/255, 235/255, 97/255),
                 (207/255, 139/255, 251/255)])
fig, ax = plt.subplots(ncols=3, figsize=(47, 15))
dataset_scatter = ax[0].scatter(points[:, 0], points[:, 1], c=cmap[labels], marker='.')
ax[0].set(title='Dataset', xlabel='$x$', ylabel='$y$')
ax[1].scatter(x_train[:, 0], x_train[:, 1], c=cmap[y_train], marker='d')
ax[1].set(title='Training set', xlabel='$x$', ylabel='$y$')
ax[2].scatter(x_test[:, 0], x_test[:, 1], c=cmap[y_test], marker='^')
ax[2].set(title='Test set', xlabel='$x$', ylabel='$y$')
from matplotlib.patches import Patch
from matplotlib.lines import Line2D
# BUG FIX: the hand-written legend colours had drifted from the scatter
# colours (class 1 is plotted with cmap's (253,250,57)/255 yellow but the
# legend used (255,245,140)/255). Building the legend handles directly from
# `cmap` keeps them in sync by construction.
legend_elements = [
    Line2D([0], [0], marker='o', color='w', markerfacecolor=tuple(rgb), markersize=15)
    for rgb in cmap
]
fig.legend(
    legend_elements,
    ['0', '1', '2', '3', '4'],
    loc='lower center',
    title='Classes'
)
plt.show()
# Load the CIFAR-10 image dataset (downloads on first use).
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz 170498071/170498071 [==============================] - 3s 0us/step
# One-hot encode the integer class labels for categorical cross-entropy.
# NOTE(review): MultiLabelBinarizer on (N, 1)-shaped single labels produces a
# plain one-hot matrix here; `to_categorical` would be the conventional choice.
mlb = MultiLabelBinarizer()
y_train = mlb.fit_transform(y_train.reshape((-1,1)))
input_shape = x_train.shape[1:]  # per-image shape, e.g. (32, 32, 3) for CIFAR-10
n_classes = y_train[0].shape[0]  # number of one-hot columns
classes = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
len(classes)
10
# Preview the first five training images with their decoded class names.
fig, ax = plt.subplots(ncols=5, figsize=(30, 5))
# BUG FIX: the loop variable was named `image`, shadowing the
# `tensorflow.keras.preprocessing.image` module imported at the top of the file.
for idx, img in enumerate(x_train[:5]):
    ax[idx].imshow(img, cmap='gray')
    ax[idx].axis('off')
    ax[idx].set_title(f'{classes[np.argmax(y_train[idx])]}')
plt.tight_layout()
plt.show()
# Transfer learning: ResNet50 backbone (ImageNet weights, classification top
# removed) followed by a small Dense head. For the first training phase every
# layer — backbone included — is left trainable.
resnet50 = ResNet50(weights='imagenet', include_top=False, input_shape=input_shape)
for layer in resnet50.layers:
    layer.trainable = True
# Head: Flatten -> Dense(512) -> Dense(64) -> softmax, with dropout and batch
# norm between the dense blocks. (Commented-out intermediate 256/128 blocks
# from earlier experiments have been removed.)
X = resnet50.output
X = Flatten()(X)
X = Dense(512, kernel_initializer='he_uniform')(X)
X = Dropout(.2)(X)
X = BatchNormalization()(X)
X = Activation('relu')(X)
X = Dense(64, kernel_initializer='he_uniform')(X)
X = Dropout(.2)(X)
X = BatchNormalization()(X)
X = Activation('relu')(X)
output = Dense(n_classes, activation='softmax')(X)
resnet50_model = Model(inputs=resnet50.input, outputs=output)
resnet50_model.compile(
    loss='categorical_crossentropy',
    optimizer=Adam(learning_rate=0.0001),
    metrics=['acc']
)
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5 94765736/94765736 [==============================] - 0s 0us/step
# Train the whole network
# Phase 1: fine-tune all layers (backbone + head) for 20 epochs.
history1 = resnet50_model.fit(
x_train, y_train,
validation_split=0.1,
batch_size=64,
epochs=20,
shuffle=True,
callbacks=[
# NOTE(review): min_lr (1e-4) equals the optimizer's initial learning rate,
# so this callback can never actually reduce the rate — confirm intent.
ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, min_lr=0.0001)
]
)
Epoch 1/20 704/704 [==============================] - 50s 49ms/step - loss: 1.4019 - acc: 0.5329 - val_loss: 0.8403 - val_acc: 0.7238 - lr: 1.0000e-04 Epoch 2/20 704/704 [==============================] - 32s 45ms/step - loss: 0.7921 - acc: 0.7495 - val_loss: 0.6626 - val_acc: 0.7820 - lr: 1.0000e-04 Epoch 3/20 704/704 [==============================] - 32s 46ms/step - loss: 0.5648 - acc: 0.8258 - val_loss: 0.6356 - val_acc: 0.7936 - lr: 1.0000e-04 Epoch 4/20 704/704 [==============================] - 33s 47ms/step - loss: 0.4230 - acc: 0.8715 - val_loss: 0.6354 - val_acc: 0.7986 - lr: 1.0000e-04 Epoch 5/20 704/704 [==============================] - 32s 46ms/step - loss: 0.3266 - acc: 0.9018 - val_loss: 0.6337 - val_acc: 0.8062 - lr: 1.0000e-04 Epoch 6/20 704/704 [==============================] - 32s 46ms/step - loss: 0.2624 - acc: 0.9222 - val_loss: 0.6401 - val_acc: 0.8022 - lr: 1.0000e-04 Epoch 7/20 704/704 [==============================] - 32s 46ms/step - loss: 0.1989 - acc: 0.9407 - val_loss: 0.6279 - val_acc: 0.8216 - lr: 1.0000e-04 Epoch 8/20 704/704 [==============================] - 32s 46ms/step - loss: 0.1612 - acc: 0.9521 - val_loss: 0.6674 - val_acc: 0.8186 - lr: 1.0000e-04 Epoch 9/20 704/704 [==============================] - 32s 46ms/step - loss: 0.1294 - acc: 0.9613 - val_loss: 0.7277 - val_acc: 0.8110 - lr: 1.0000e-04 Epoch 10/20 704/704 [==============================] - 32s 45ms/step - loss: 0.1160 - acc: 0.9654 - val_loss: 0.7471 - val_acc: 0.8110 - lr: 1.0000e-04 Epoch 11/20 704/704 [==============================] - 32s 46ms/step - loss: 0.1060 - acc: 0.9689 - val_loss: 0.7070 - val_acc: 0.8216 - lr: 1.0000e-04 Epoch 12/20 704/704 [==============================] - 32s 46ms/step - loss: 0.0939 - acc: 0.9715 - val_loss: 0.7056 - val_acc: 0.8226 - lr: 1.0000e-04 Epoch 13/20 704/704 [==============================] - 32s 46ms/step - loss: 0.0796 - acc: 0.9760 - val_loss: 0.7523 - val_acc: 0.8146 - lr: 1.0000e-04 Epoch 14/20 704/704 
[==============================] - 32s 46ms/step - loss: 0.0782 - acc: 0.9762 - val_loss: 0.7891 - val_acc: 0.8160 - lr: 1.0000e-04 Epoch 15/20 704/704 [==============================] - 32s 45ms/step - loss: 0.0736 - acc: 0.9771 - val_loss: 0.7441 - val_acc: 0.8230 - lr: 1.0000e-04 Epoch 16/20 704/704 [==============================] - 32s 46ms/step - loss: 0.0646 - acc: 0.9803 - val_loss: 0.7993 - val_acc: 0.8128 - lr: 1.0000e-04 Epoch 17/20 704/704 [==============================] - 32s 46ms/step - loss: 0.0742 - acc: 0.9771 - val_loss: 0.7152 - val_acc: 0.8254 - lr: 1.0000e-04 Epoch 18/20 704/704 [==============================] - 32s 46ms/step - loss: 0.0501 - acc: 0.9851 - val_loss: 0.7482 - val_acc: 0.8292 - lr: 1.0000e-04 Epoch 19/20 704/704 [==============================] - 32s 46ms/step - loss: 0.0524 - acc: 0.9839 - val_loss: 0.7492 - val_acc: 0.8300 - lr: 1.0000e-04 Epoch 20/20 704/704 [==============================] - 32s 46ms/step - loss: 0.0569 - acc: 0.9830 - val_loss: 0.7225 - val_acc: 0.8334 - lr: 1.0000e-04
# Freeze core ResNet layers and train again
# NOTE(review): the two loops below look inverted relative to the comment.
# Every layer is first frozen, then layers[:200] — the FIRST 200, which for
# a standard ResNet50 (~175 layers plus head) is the whole model — are set
# back to trainable. Net effect: nothing stays frozen and this phase is a
# full fine-tune. Confirm whether the intent was to freeze the base and
# train only the top layers.
for layer in resnet50_model.layers:
    layer.trainable = False
for layer in resnet50_model.layers[:200]:
    layer.trainable = True
# Recompile so the new trainable flags take effect.
resnet50_model.compile(
    loss='categorical_crossentropy',
    optimizer=Adam(learning_rate=0.0001),
    metrics=['acc']
)
# Second training phase; epochs=1000 is an upper bound — EarlyStopping
# ends the run once val_loss stalls for 10 epochs (logs show 13 epochs).
history2 = resnet50_model.fit(
    x_train, y_train,
    validation_split=0.1,
    batch_size=64,
    epochs=1000,
    shuffle=True,
    callbacks=[
        EarlyStopping(monitor='val_loss', patience=10),
        # NOTE(review): min_lr equals the initial LR (1e-4), so this
        # callback can never lower the rate — the logs confirm lr stays
        # at 1.0000e-04 throughout. Lower min_lr (e.g. 1e-6) if LR decay
        # is actually wanted.
        ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=5, min_lr=0.0001)
    ]
)
Epoch 1/1000 704/704 [==============================] - 39s 48ms/step - loss: 0.0588 - acc: 0.9824 - val_loss: 0.7987 - val_acc: 0.8250 - lr: 1.0000e-04 Epoch 2/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0491 - acc: 0.9849 - val_loss: 0.8296 - val_acc: 0.8186 - lr: 1.0000e-04 Epoch 3/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0548 - acc: 0.9836 - val_loss: 0.7779 - val_acc: 0.8282 - lr: 1.0000e-04 Epoch 4/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0424 - acc: 0.9872 - val_loss: 0.8316 - val_acc: 0.8296 - lr: 1.0000e-04 Epoch 5/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0441 - acc: 0.9869 - val_loss: 0.8410 - val_acc: 0.8206 - lr: 1.0000e-04 Epoch 6/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0489 - acc: 0.9858 - val_loss: 0.7811 - val_acc: 0.8364 - lr: 1.0000e-04 Epoch 7/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0429 - acc: 0.9872 - val_loss: 0.7876 - val_acc: 0.8358 - lr: 1.0000e-04 Epoch 8/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0413 - acc: 0.9879 - val_loss: 0.8262 - val_acc: 0.8262 - lr: 1.0000e-04 Epoch 9/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0468 - acc: 0.9863 - val_loss: 0.7940 - val_acc: 0.8292 - lr: 1.0000e-04 Epoch 10/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0266 - acc: 0.9921 - val_loss: 0.8151 - val_acc: 0.8322 - lr: 1.0000e-04 Epoch 11/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0305 - acc: 0.9912 - val_loss: 0.8717 - val_acc: 0.8240 - lr: 1.0000e-04 Epoch 12/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0418 - acc: 0.9880 - val_loss: 0.8380 - val_acc: 0.8224 - lr: 1.0000e-04 Epoch 13/1000 704/704 [==============================] - 32s 46ms/step - loss: 0.0378 - acc: 0.9886 - val_loss: 0.9327 - val_acc: 0.8090 - lr: 1.0000e-04
# Stitch the two training phases into a single history dict so the
# metric curves can be plotted as one continuous timeline.
_metric_keys = ('loss', 'acc', 'val_loss', 'val_acc')
history = {key: history1.history[key] + history2.history[key]
           for key in _metric_keys}
# Loss (left) and accuracy (right) across both training phases; the
# dashed vertical line marks the start of the second phase (epoch 10 ≈ 20).
fig, (loss_ax, acc_ax) = plt.subplots(ncols=2, figsize=(16, 6))
loss_ax.plot(history['loss'], label='Training loss')
loss_ax.plot(history['val_loss'], label='Validation loss')
loss_ax.axvline(x=10, color='k', linestyle='--')
loss_ax.legend()
loss_ax.set(xlabel='Epoch', ylabel='Loss')
acc_ax.plot(history['acc'], label='Training accuracy')
acc_ax.plot(history['val_acc'], label='Validation accuracy')
acc_ax.axvline(x=10, color='k', linestyle='--')
acc_ax.set(xlabel='Epoch', ylabel='Accuracy')
plt.legend()
plt.tight_layout()
plt.show()
y_pred = [ np.argmax(pred) for pred in resnet50_model.predict(x_test) ]
313/313 [==============================] - 4s 11ms/step
print(classification_report(y_test, y_pred))
precision recall f1-score support
0 0.86 0.83 0.85 1000
1 0.87 0.90 0.89 1000
2 0.81 0.76 0.78 1000
3 0.69 0.66 0.67 1000
4 0.77 0.85 0.81 1000
5 0.74 0.76 0.75 1000
6 0.81 0.90 0.85 1000
7 0.90 0.82 0.86 1000
8 0.89 0.90 0.90 1000
9 0.89 0.84 0.86 1000
accuracy 0.82 10000
macro avg 0.82 0.82 0.82 10000
weighted avg 0.82 0.82 0.82 10000
One-step prediction $x(n+1)$ from the time series $x(n) = 1 + \cos(n + \cos^2(n))$, where $n=0,1,2,3,\dots$
# Sample x(n) = 1 + cos(n + cos^2(n)) at 10000 points over [0, 100].
# NOTE(review): the task states n = 0,1,2,... (integer steps); this grid
# uses a step of ~0.01 instead — confirm which sampling is intended.
n = np.linspace(0, 100, 10000)
y = 1 + np.cos(n + np.cos(n)**2)
# NOTE(review): a random (shuffled) train/test split destroys temporal
# order, so the windows later built from X_train are sequences of shuffled
# n values rather than consecutive samples of the series — questionable
# for one-step-ahead forecasting; a chronological split would be usual.
X_train, X_test, y_train, y_test = train_test_split(n, y, random_state=33, test_size=0.25)
print(f'Shape X_train = {X_train.shape}')
print(f'Shape y_train = {y_train.shape}')
print(f'Shape X_test = {X_test.shape}')
print(f'Shape y_test = {y_test.shape}')
Shape X_train = (7500,) Shape y_train = (7500,) Shape X_test = (2500,) Shape y_test = (2500,)
# Scatter the train and test portions of the sampled series together.
fig, axis = plt.subplots(figsize=(14, 9))
axis.scatter(X_train, y_train)
axis.scatter(X_test, y_test)
plt.show()
# Build 7-step input windows and their one-step-ahead targets by gluing a
# 4-window generator to a 3-window generator over the same series:
# window = X[i:i+7], target = X[i+7] (the value right after the window).
# BUG FIX: the original took the target from sequence_x (next_x = X[i+4]),
# which is element 4 of the input window itself — the label leaked into
# the input, making the task trivial. The true next value is the target
# of sequence_y.
# NOTE(review): windows are built from X_train (the shuffled n values),
# not from the series values — confirm that predicting n, not x(n), is
# really intended.
sequence_x = list(TimeseriesGenerator(X_train, X_train, 4, batch_size=1))
sequence_y = list(TimeseriesGenerator(X_train[4:], X_train[4:], 3, batch_size=1))
train_seqs = []
y_train = []
for (x_seq, _), (next_seq, next_x) in zip(sequence_x, sequence_y):
    # 4-window followed by the adjacent 3-window = one contiguous 7-step input
    seq = np.append(x_seq.reshape(4,), next_seq.reshape(3,))
    train_seqs.append(seq)
    y_train.append(next_x)  # X[i+7]: first value after the window
train_seqs = np.array(train_seqs)
y_train = np.array(y_train)
# Same windowing for the test split: input = X[i:i+7], target = X[i+7].
# BUG FIX: as in the training set, the original used sequence_x's target
# (X[i+4]) as the label, which is element 4 of the input window — label
# leakage. The correct one-step-ahead target comes from sequence_y.
sequence_x = list(TimeseriesGenerator(X_test, X_test, 4, batch_size=1))
sequence_y = list(TimeseriesGenerator(X_test[4:], X_test[4:], 3, batch_size=1))
test_seqs = []
y_test = []
for (x_seq, _), (next_seq, next_x) in zip(sequence_x, sequence_y):
    # 4-window + adjacent 3-window = one contiguous 7-step input sequence
    seq = np.append(x_seq.reshape(4,), next_seq.reshape(3,))
    test_seqs.append(seq)
    y_test.append(next_x)  # X[i+7]: first value after the window
test_seqs = np.array(test_seqs)
y_test = np.array(y_test)
# Stacked-LSTM regressor: 7 timesteps of 1 feature in, one scalar out.
model = Sequential()
model.add(LSTM(128, input_shape=(7, 1), return_sequences=True))
model.add(LSTM(64))
model.add(Dense(1))
model.compile(optimizer="adam", loss="mean_squared_error")
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
lstm (LSTM) (None, 7, 128) 66560
lstm_1 (LSTM) (None, 64) 49408
dense (Dense) (None, 1) 65
=================================================================
Total params: 116,033
Trainable params: 116,033
Non-trainable params: 0
_________________________________________________________________
# Train the LSTM; EarlyStopping ends the run after 5 stale validation
# epochs, and ReduceLROnPlateau cuts the LR 10x (floor 1e-4) on plateau.
stop_early = EarlyStopping(monitor='val_loss', patience=5)
shrink_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=5, min_lr=0.0001)
history = model.fit(
    train_seqs, y_train,
    epochs=100,
    batch_size=8,
    validation_split=0.1,
    shuffle=True,
    callbacks=[stop_early, shrink_lr],
)
Epoch 1/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0411 - val_loss: 0.0396 - lr: 1.0000e-04 Epoch 2/100 843/843 [==============================] - 11s 14ms/step - loss: 0.0272 - val_loss: 0.0300 - lr: 1.0000e-04 Epoch 3/100 843/843 [==============================] - 11s 14ms/step - loss: 0.0231 - val_loss: 0.0279 - lr: 1.0000e-04 Epoch 4/100 843/843 [==============================] - 16s 19ms/step - loss: 0.0223 - val_loss: 0.0248 - lr: 1.0000e-04 Epoch 5/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0215 - val_loss: 0.0282 - lr: 1.0000e-04 Epoch 6/100 843/843 [==============================] - 11s 13ms/step - loss: 0.0189 - val_loss: 0.0209 - lr: 1.0000e-04 Epoch 7/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0173 - val_loss: 0.0246 - lr: 1.0000e-04 Epoch 8/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0169 - val_loss: 0.0255 - lr: 1.0000e-04 Epoch 9/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0158 - val_loss: 0.0131 - lr: 1.0000e-04 Epoch 10/100 843/843 [==============================] - 15s 18ms/step - loss: 0.0141 - val_loss: 0.0170 - lr: 1.0000e-04 Epoch 11/100 843/843 [==============================] - 11s 13ms/step - loss: 0.0146 - val_loss: 0.0210 - lr: 1.0000e-04 Epoch 12/100 843/843 [==============================] - 11s 13ms/step - loss: 0.0134 - val_loss: 0.0141 - lr: 1.0000e-04 Epoch 13/100 843/843 [==============================] - 13s 16ms/step - loss: 0.0140 - val_loss: 0.0125 - lr: 1.0000e-04 Epoch 14/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0112 - val_loss: 0.0133 - lr: 1.0000e-04 Epoch 15/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0119 - val_loss: 0.0103 - lr: 1.0000e-04 Epoch 16/100 843/843 [==============================] - 11s 13ms/step - loss: 0.0109 - val_loss: 0.0182 - lr: 1.0000e-04 Epoch 17/100 843/843 [==============================] - 11s 13ms/step - 
loss: 0.0116 - val_loss: 0.0102 - lr: 1.0000e-04 Epoch 18/100 843/843 [==============================] - 11s 13ms/step - loss: 0.0114 - val_loss: 0.0155 - lr: 1.0000e-04 Epoch 19/100 843/843 [==============================] - 11s 14ms/step - loss: 0.0108 - val_loss: 0.0176 - lr: 1.0000e-04 Epoch 20/100 843/843 [==============================] - 11s 14ms/step - loss: 0.0091 - val_loss: 0.0106 - lr: 1.0000e-04 Epoch 21/100 843/843 [==============================] - 11s 14ms/step - loss: 0.0109 - val_loss: 0.0319 - lr: 1.0000e-04 Epoch 22/100 843/843 [==============================] - 12s 14ms/step - loss: 0.0087 - val_loss: 0.0112 - lr: 1.0000e-04
# Training vs validation loss per epoch for the LSTM run.
fig, axis = plt.subplots(figsize=(8, 6))
for key, label in (('loss', 'Training loss'), ('val_loss', 'Validation loss')):
    axis.plot(history.history[key], label=label)
axis.legend()
axis.set(xlabel='Epoch', ylabel='Loss')
plt.legend()
plt.tight_layout()
plt.show()
y_pred = model.predict(test_seqs)
78/78 [==============================] - 1s 8ms/step
# Groundtruth vs predictions: full test span (left) and a zoom on the
# first 50 points (right).
# BUG FIX: the axis labels used non-raw strings containing '\h', an
# invalid escape sequence (see the captured DeprecationWarning; a future
# SyntaxError). Raw strings produce the identical label text warning-free.
fig, axes = plt.subplots(ncols=2, figsize=(25, 6))
axes[0].plot(y_test, marker='.', label='Groundtruth')
axes[0].scatter(
    range(len(y_pred)), y_pred,
    marker='X', edgecolors='k',
    label='Predictions', c='#ff7f0e',
    s=64
)
axes[0].legend()
axes[0].set(
    ylabel=r'$\hat{x}(n)$',
    xlabel=r'$n$'
)
axes[1].plot(y_test[:50], marker='.', label='Groundtruth')
axes[1].scatter(
    range(len(y_pred[:50])), y_pred[:50],
    marker='X', edgecolors='k',
    label='Predictions', c='#ff7f0e',
    s=64
)
axes[1].legend()
axes[1].set(
    ylabel=r'$\hat{x}(n)$',
    xlabel=r'$n$'
)
plt.tight_layout()
plt.show()
<>:13: DeprecationWarning: invalid escape sequence \h
Use an autoencoder network to reduce the dimensionality of the data to two dimensions.
# Four 8-D Gaussian clusters of 1500 points each, all with identity
# covariance; means sit at the origin and at 4 along dimensions 0, 3, 7.
m1 = np.zeros(8)
m2 = np.array([4] + [0] * 7)
m3 = np.array([0] * 3 + [4] + [0] * 4)
m4 = np.array([0] * 7 + [4])
v = np.identity(8)
x1, x2, x3, x4 = (
    np.random.multivariate_normal(mean, v, 1500)
    for mean in (m1, m2, m3, m4)
)
# autoencoder network: 8-D input -> 2-D bottleneck -> 8-D reconstruction
# BUG FIX: the task asks for a reduction to TWO dimensions, but the
# original bottleneck was Dense(8) — no dimensionality reduction at all.
# BUG FIX: the original decoder ended in tanh, bounding reconstructions
# to (-1, 1) while the data has means up to 4 (the ~1.4 loss plateau in
# the logs is that clipping); a linear output removes the bound.
input_size = Input(shape=(8,))
encoder = Dense(512, activation='leaky_relu')(input_size)
encoder = Dense(512, activation='leaky_relu')(encoder)
encoder = Dropout(0.1)(encoder)
encoder = Dense(2, activation='tanh')(encoder)  # 2-D latent code
decoder = Dense(512, activation='leaky_relu')(encoder)
decoder = Dropout(0.1)(decoder)
decoder = Dense(8, activation='linear')(decoder)  # unbounded reconstruction
# model_autoencoder reconstructs the input; model_encoder exposes the
# 2-D code for visualization.
model_autoencoder = Model(input_size, decoder)
model_encoder = Model(input_size, encoder)
model_autoencoder.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.0001),
    loss='mse',
)
# Train the autoencoder on all four clusters at once.
# BUG FIX: the original ran four sequential fit() calls, one cluster at a
# time; each phase largely overwrote what the previous one learned
# (catastrophic forgetting — visible in the logs as the loss resetting at
# each phase), leaving a model fitted mostly to x4. Training once on the
# row-shuffled concatenation covers the full distribution, and shuffling
# first also keeps the trailing validation_split slice representative.
x_all = np.random.permutation(np.concatenate([x1, x2, x3, x4]))
model_autoencoder.fit(x_all, x_all, epochs=70, validation_split=0.1, shuffle=True)
Epoch 1/70 43/43 [==============================] - 2s 12ms/step - loss: 0.2024 - val_loss: 0.1695 Epoch 2/70 43/43 [==============================] - 0s 7ms/step - loss: 0.1926 - val_loss: 0.1650 Epoch 3/70 43/43 [==============================] - 0s 8ms/step - loss: 0.1883 - val_loss: 0.1618 Epoch 4/70 43/43 [==============================] - 0s 7ms/step - loss: 0.1861 - val_loss: 0.1594 Epoch 5/70 43/43 [==============================] - 0s 6ms/step - loss: 0.1833 - val_loss: 0.1581 Epoch 6/70 43/43 [==============================] - 0s 9ms/step - loss: 0.1823 - val_loss: 0.1570 Epoch 7/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1812 - val_loss: 0.1559 Epoch 8/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1799 - val_loss: 0.1549 Epoch 9/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1792 - val_loss: 0.1544 Epoch 10/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1780 - val_loss: 0.1532 Epoch 11/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1773 - val_loss: 0.1533 Epoch 12/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1767 - val_loss: 0.1525 Epoch 13/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1761 - val_loss: 0.1523 Epoch 14/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1760 - val_loss: 0.1517 Epoch 15/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1755 - val_loss: 0.1514 Epoch 16/70 43/43 [==============================] - 0s 3ms/step - loss: 0.1749 - val_loss: 0.1508 Epoch 17/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1746 - val_loss: 0.1507 Epoch 18/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1740 - val_loss: 0.1501 Epoch 19/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1738 - val_loss: 0.1502 Epoch 20/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1736 - val_loss: 0.1500 Epoch 21/70 43/43 
[==============================] - 0s 4ms/step - loss: 0.1730 - val_loss: 0.1496 Epoch 22/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1730 - val_loss: 0.1491 Epoch 23/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1723 - val_loss: 0.1493 Epoch 24/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1722 - val_loss: 0.1489 Epoch 25/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1719 - val_loss: 0.1486 Epoch 26/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1715 - val_loss: 0.1484 Epoch 27/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1712 - val_loss: 0.1479 Epoch 28/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1709 - val_loss: 0.1481 Epoch 29/70 43/43 [==============================] - 0s 3ms/step - loss: 0.1707 - val_loss: 0.1479 Epoch 30/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1706 - val_loss: 0.1475 Epoch 31/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1703 - val_loss: 0.1473 Epoch 32/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1700 - val_loss: 0.1475 Epoch 33/70 43/43 [==============================] - 0s 3ms/step - loss: 0.1699 - val_loss: 0.1475 Epoch 34/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1695 - val_loss: 0.1466 Epoch 35/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1692 - val_loss: 0.1467 Epoch 36/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1691 - val_loss: 0.1467 Epoch 37/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1691 - val_loss: 0.1466 Epoch 38/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1686 - val_loss: 0.1460 Epoch 39/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1686 - val_loss: 0.1458 Epoch 40/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1683 - val_loss: 0.1458 Epoch 41/70 43/43 
[==============================] - 0s 4ms/step - loss: 0.1683 - val_loss: 0.1455 Epoch 42/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1681 - val_loss: 0.1452 Epoch 43/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1680 - val_loss: 0.1450 Epoch 44/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1676 - val_loss: 0.1448 Epoch 45/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1677 - val_loss: 0.1449 Epoch 46/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1674 - val_loss: 0.1447 Epoch 47/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1674 - val_loss: 0.1447 Epoch 48/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1672 - val_loss: 0.1449 Epoch 49/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1673 - val_loss: 0.1444 Epoch 50/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1669 - val_loss: 0.1441 Epoch 51/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1667 - val_loss: 0.1439 Epoch 52/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1668 - val_loss: 0.1436 Epoch 53/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1668 - val_loss: 0.1443 Epoch 54/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1663 - val_loss: 0.1435 Epoch 55/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1664 - val_loss: 0.1435 Epoch 56/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1659 - val_loss: 0.1432 Epoch 57/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1661 - val_loss: 0.1431 Epoch 58/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1659 - val_loss: 0.1430 Epoch 59/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1659 - val_loss: 0.1430 Epoch 60/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1657 - val_loss: 0.1428 Epoch 61/70 43/43 
[==============================] - 0s 4ms/step - loss: 0.1658 - val_loss: 0.1425 Epoch 62/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1652 - val_loss: 0.1425 Epoch 63/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1654 - val_loss: 0.1424 Epoch 64/70 43/43 [==============================] - 0s 3ms/step - loss: 0.1654 - val_loss: 0.1423 Epoch 65/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1654 - val_loss: 0.1425 Epoch 66/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1651 - val_loss: 0.1423 Epoch 67/70 43/43 [==============================] - 0s 3ms/step - loss: 0.1652 - val_loss: 0.1422 Epoch 68/70 43/43 [==============================] - 0s 3ms/step - loss: 0.1646 - val_loss: 0.1419 Epoch 69/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1651 - val_loss: 0.1416 Epoch 70/70 43/43 [==============================] - 0s 4ms/step - loss: 0.1648 - val_loss: 0.1419 Epoch 1/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3920 - val_loss: 1.4086 Epoch 2/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3852 - val_loss: 1.4073 Epoch 3/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3836 - val_loss: 1.4061 Epoch 4/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3823 - val_loss: 1.4048 Epoch 5/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3818 - val_loss: 1.4051 Epoch 6/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3812 - val_loss: 1.4042 Epoch 7/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3811 - val_loss: 1.4045 Epoch 8/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3800 - val_loss: 1.4036 Epoch 9/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3802 - val_loss: 1.4035 Epoch 10/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3799 - val_loss: 1.4031 Epoch 11/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.3798 - val_loss: 1.4031 Epoch 12/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3793 - val_loss: 1.4032 Epoch 13/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3792 - val_loss: 1.4028 Epoch 14/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3791 - val_loss: 1.4028 Epoch 15/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3787 - val_loss: 1.4027 Epoch 16/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3787 - val_loss: 1.4027 Epoch 17/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3785 - val_loss: 1.4024 Epoch 18/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3779 - val_loss: 1.4023 Epoch 19/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3784 - val_loss: 1.4023 Epoch 20/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3778 - val_loss: 1.4026 Epoch 21/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3778 - val_loss: 1.4023 Epoch 22/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3779 - val_loss: 1.4025 Epoch 23/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3774 - val_loss: 1.4020 Epoch 24/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3778 - val_loss: 1.4019 Epoch 25/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3777 - val_loss: 1.4024 Epoch 26/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3774 - val_loss: 1.4019 Epoch 27/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3772 - val_loss: 1.4021 Epoch 28/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3771 - val_loss: 1.4018 Epoch 29/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3772 - val_loss: 1.4017 Epoch 30/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3769 - val_loss: 1.4014 Epoch 31/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.3769 - val_loss: 1.4012 Epoch 32/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3768 - val_loss: 1.4018 Epoch 33/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3771 - val_loss: 1.4017 Epoch 34/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3768 - val_loss: 1.4011 Epoch 35/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3765 - val_loss: 1.4012 Epoch 36/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3765 - val_loss: 1.4013 Epoch 37/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3766 - val_loss: 1.4015 Epoch 38/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3763 - val_loss: 1.4012 Epoch 39/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3762 - val_loss: 1.4011 Epoch 40/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3762 - val_loss: 1.4009 Epoch 41/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3759 - val_loss: 1.4013 Epoch 42/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3762 - val_loss: 1.4017 Epoch 43/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3759 - val_loss: 1.4008 Epoch 44/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3760 - val_loss: 1.4008 Epoch 45/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3756 - val_loss: 1.4008 Epoch 46/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3757 - val_loss: 1.4006 Epoch 47/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3757 - val_loss: 1.4010 Epoch 48/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3757 - val_loss: 1.4005 Epoch 49/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3753 - val_loss: 1.4008 Epoch 50/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3754 - val_loss: 1.4005 Epoch 51/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.3757 - val_loss: 1.4006 Epoch 52/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3753 - val_loss: 1.4006 Epoch 53/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3754 - val_loss: 1.4005 Epoch 54/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3753 - val_loss: 1.4003 Epoch 55/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3750 - val_loss: 1.4005 Epoch 56/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3751 - val_loss: 1.4005 Epoch 57/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3752 - val_loss: 1.4009 Epoch 58/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3752 - val_loss: 1.4004 Epoch 59/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3750 - val_loss: 1.4003 Epoch 60/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3752 - val_loss: 1.4004 Epoch 61/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3749 - val_loss: 1.4003 Epoch 62/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3750 - val_loss: 1.4001 Epoch 63/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3750 - val_loss: 1.4002 Epoch 64/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3748 - val_loss: 1.4003 Epoch 65/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3748 - val_loss: 1.4001 Epoch 66/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3747 - val_loss: 1.4004 Epoch 67/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3747 - val_loss: 1.4001 Epoch 68/70 43/43 [==============================] - 0s 7ms/step - loss: 1.3747 - val_loss: 1.4005 Epoch 69/70 43/43 [==============================] - 0s 6ms/step - loss: 1.3745 - val_loss: 1.4003 Epoch 70/70 43/43 [==============================] - 0s 6ms/step - loss: 1.3745 - val_loss: 1.4003 Epoch 1/70 43/43 
[==============================] - 0s 5ms/step - loss: 1.3991 - val_loss: 1.3902 Epoch 2/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3796 - val_loss: 1.3840 Epoch 3/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3759 - val_loss: 1.3818 Epoch 4/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3742 - val_loss: 1.3813 Epoch 5/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3741 - val_loss: 1.3800 Epoch 6/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3728 - val_loss: 1.3787 Epoch 7/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3716 - val_loss: 1.3781 Epoch 8/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3716 - val_loss: 1.3785 Epoch 9/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3709 - val_loss: 1.3777 Epoch 10/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3710 - val_loss: 1.3780 Epoch 11/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3705 - val_loss: 1.3770 Epoch 12/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3700 - val_loss: 1.3769 Epoch 13/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3695 - val_loss: 1.3769 Epoch 14/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3693 - val_loss: 1.3759 Epoch 15/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3692 - val_loss: 1.3764 Epoch 16/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3691 - val_loss: 1.3761 Epoch 17/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3689 - val_loss: 1.3756 Epoch 18/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3686 - val_loss: 1.3759 Epoch 19/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3684 - val_loss: 1.3757 Epoch 20/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3681 - val_loss: 1.3753 Epoch 21/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.3684 - val_loss: 1.3754 Epoch 22/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3682 - val_loss: 1.3750 Epoch 23/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3682 - val_loss: 1.3754 Epoch 24/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3679 - val_loss: 1.3750 Epoch 25/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3681 - val_loss: 1.3755 Epoch 26/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3677 - val_loss: 1.3749 Epoch 27/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3674 - val_loss: 1.3751 Epoch 28/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3674 - val_loss: 1.3752 Epoch 29/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3675 - val_loss: 1.3747 Epoch 30/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3674 - val_loss: 1.3746 Epoch 31/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3674 - val_loss: 1.3746 Epoch 32/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3672 - val_loss: 1.3746 Epoch 33/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3670 - val_loss: 1.3746 Epoch 34/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3668 - val_loss: 1.3745 Epoch 35/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3668 - val_loss: 1.3745 Epoch 36/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3667 - val_loss: 1.3746 Epoch 37/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3665 - val_loss: 1.3744 Epoch 38/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3667 - val_loss: 1.3745 Epoch 39/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3667 - val_loss: 1.3749 Epoch 40/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3666 - val_loss: 1.3746 Epoch 41/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.3667 - val_loss: 1.3743 Epoch 42/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3668 - val_loss: 1.3742 Epoch 43/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3664 - val_loss: 1.3738 Epoch 44/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3662 - val_loss: 1.3740 Epoch 45/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3663 - val_loss: 1.3739 Epoch 46/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3662 - val_loss: 1.3737 Epoch 47/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3661 - val_loss: 1.3739 Epoch 48/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3659 - val_loss: 1.3737 Epoch 49/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3660 - val_loss: 1.3737 Epoch 50/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3658 - val_loss: 1.3738 Epoch 51/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3659 - val_loss: 1.3736 Epoch 52/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3658 - val_loss: 1.3741 Epoch 53/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3661 - val_loss: 1.3740 Epoch 54/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3659 - val_loss: 1.3736 Epoch 55/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3657 - val_loss: 1.3736 Epoch 56/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3655 - val_loss: 1.3735 Epoch 57/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3658 - val_loss: 1.3738 Epoch 58/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3657 - val_loss: 1.3734 Epoch 59/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3658 - val_loss: 1.3737 Epoch 60/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3655 - val_loss: 1.3734 Epoch 61/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.3654 - val_loss: 1.3735 Epoch 62/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3652 - val_loss: 1.3735 Epoch 63/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3654 - val_loss: 1.3736 Epoch 64/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3653 - val_loss: 1.3735 Epoch 65/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3651 - val_loss: 1.3734 Epoch 66/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3653 - val_loss: 1.3734 Epoch 67/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3652 - val_loss: 1.3731 Epoch 68/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3652 - val_loss: 1.3731 Epoch 69/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3651 - val_loss: 1.3732 Epoch 70/70 43/43 [==============================] - 0s 4ms/step - loss: 1.3652 - val_loss: 1.3730 Epoch 1/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4783 - val_loss: 1.4615 Epoch 2/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4290 - val_loss: 1.4551 Epoch 3/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4239 - val_loss: 1.4525 Epoch 4/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4222 - val_loss: 1.4505 Epoch 5/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4212 - val_loss: 1.4501 Epoch 6/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4197 - val_loss: 1.4493 Epoch 7/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4190 - val_loss: 1.4492 Epoch 8/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4188 - val_loss: 1.4488 Epoch 9/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4183 - val_loss: 1.4487 Epoch 10/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4176 - val_loss: 1.4479 Epoch 11/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.4177 - val_loss: 1.4481 Epoch 12/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4175 - val_loss: 1.4477 Epoch 13/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4170 - val_loss: 1.4475 Epoch 14/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4168 - val_loss: 1.4473 Epoch 15/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4166 - val_loss: 1.4471 Epoch 16/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4164 - val_loss: 1.4471 Epoch 17/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4162 - val_loss: 1.4477 Epoch 18/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4160 - val_loss: 1.4467 Epoch 19/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4158 - val_loss: 1.4469 Epoch 20/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4160 - val_loss: 1.4468 Epoch 21/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4159 - val_loss: 1.4467 Epoch 22/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4156 - val_loss: 1.4463 Epoch 23/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4155 - val_loss: 1.4470 Epoch 24/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4156 - val_loss: 1.4467 Epoch 25/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4155 - val_loss: 1.4463 Epoch 26/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4152 - val_loss: 1.4461 Epoch 27/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4148 - val_loss: 1.4461 Epoch 28/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4148 - val_loss: 1.4460 Epoch 29/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4150 - val_loss: 1.4462 Epoch 30/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4149 - val_loss: 1.4463 Epoch 31/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.4147 - val_loss: 1.4459 Epoch 32/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4145 - val_loss: 1.4459 Epoch 33/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4148 - val_loss: 1.4457 Epoch 34/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4146 - val_loss: 1.4461 Epoch 35/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4141 - val_loss: 1.4460 Epoch 36/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4143 - val_loss: 1.4458 Epoch 37/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4145 - val_loss: 1.4457 Epoch 38/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4143 - val_loss: 1.4459 Epoch 39/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4142 - val_loss: 1.4456 Epoch 40/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4141 - val_loss: 1.4459 Epoch 41/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4140 - val_loss: 1.4459 Epoch 42/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4139 - val_loss: 1.4455 Epoch 43/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4137 - val_loss: 1.4457 Epoch 44/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4138 - val_loss: 1.4458 Epoch 45/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4138 - val_loss: 1.4457 Epoch 46/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4136 - val_loss: 1.4458 Epoch 47/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4136 - val_loss: 1.4456 Epoch 48/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4135 - val_loss: 1.4453 Epoch 49/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4136 - val_loss: 1.4457 Epoch 50/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4136 - val_loss: 1.4453 Epoch 51/70 43/43 
[==============================] - 0s 4ms/step - loss: 1.4135 - val_loss: 1.4456 Epoch 52/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4133 - val_loss: 1.4453 Epoch 53/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4132 - val_loss: 1.4455 Epoch 54/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4134 - val_loss: 1.4452 Epoch 55/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4133 - val_loss: 1.4453 Epoch 56/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4134 - val_loss: 1.4454 Epoch 57/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4133 - val_loss: 1.4456 Epoch 58/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4131 - val_loss: 1.4451 Epoch 59/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4131 - val_loss: 1.4451 Epoch 60/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4130 - val_loss: 1.4451 Epoch 61/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4128 - val_loss: 1.4451 Epoch 62/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4128 - val_loss: 1.4454 Epoch 63/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4130 - val_loss: 1.4451 Epoch 64/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4127 - val_loss: 1.4449 Epoch 65/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4130 - val_loss: 1.4450 Epoch 66/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4130 - val_loss: 1.4450 Epoch 67/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4128 - val_loss: 1.4450 Epoch 68/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4126 - val_loss: 1.4450 Epoch 69/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4125 - val_loss: 1.4452 Epoch 70/70 43/43 [==============================] - 0s 4ms/step - loss: 1.4128 - val_loss: 1.4450
<keras.callbacks.History at 0x7f98f1e842d0>
# Project each class's samples into the trained encoder's 2-D latent space.
# NOTE(review): x1..x4 are presumably the per-class input arrays prepared
# earlier in the notebook (they shadow the meshgrid x1/x2 used at the top
# of the file) — confirm against the preceding cells.
encoded_x1 = model_encoder.predict(x1)
encoded_x2 = model_encoder.predict(x2)
encoded_x3 = model_encoder.predict(x3)
encoded_x4 = model_encoder.predict(x4)
47/47 [==============================] - 0s 3ms/step 47/47 [==============================] - 0s 2ms/step 47/47 [==============================] - 0s 4ms/step 47/47 [==============================] - 0s 2ms/step
fig, ax = plt.subplots(figsize=(10, 10))
# Scatter the first 2000 encoded samples of each class in the 2-D latent
# space. A single fancy index `[:2000, k]` selects the same values as the
# original chained `[:, k][0:2000]` but without building an intermediate
# full-column copy first.
ax.scatter(encoded_x1[:2000, 0], encoded_x1[:2000, 1], label='Classe 1')
ax.scatter(encoded_x2[:2000, 0], encoded_x2[:2000, 1], label='Classe 2')
ax.scatter(encoded_x3[:2000, 0], encoded_x3[:2000, 1], label='Classe 3')
ax.scatter(encoded_x4[:2000, 0], encoded_x4[:2000, 1], label='Classe 4')
ax.legend()
plt.show()
Source: IG Tech Team
!gdown https://drive.google.com/uc?id=16YVKFWB3yZxs5WoKRnmIz4CBbSxSi3ue
Downloading... From: https://drive.google.com/uc?id=16YVKFWB3yZxs5WoKRnmIz4CBbSxSi3ue To: /content/book_pride_and_prejudice.txt 100% 725k/725k [00:00<00:00, 133MB/s]
# Read the corpus; `with` guarantees the file handle is closed even on error
# (the original `open()` was never closed).
with open("book_pride_and_prejudice.txt", "r", encoding="utf8") as file:
    # readlines() yields the same list as appending line-by-line.
    lines = file.readlines()
# Join once into a single space-separated string. The original re-joined
# the whole list inside a loop over the list — quadratic work for the
# identical final result.
data = ' '.join(lines)
# Remove (not "replace by space") newlines, carriage returns, the BOM,
# and curly quotation marks.
data = data.replace('\n', '').replace('\r', '').replace('\ufeff', '').replace('“','').replace('”','')
# Collapse runs of whitespace into single spaces.
data = ' '.join(data.split())
data[:500]
'The Project Gutenberg eBook, Pride and Prejudice, by Jane Austen, Edited by R. W. (Robert William) Chapman This eBook is for the use of anyone anywhere at no cost and with almost no restrictions whatsoever. You may copy it, give it away or re-use it under the terms of the Project Gutenberg License included with this eBook or online at www.gutenberg.org Title: Pride and Prejudice Author: Jane Austen Editor: R. W. (Robert William) Chapman Release Date: May 9, 2013 [eBook #42671] Language: English '
len(data)
704792
# Fit a word-level tokenizer on the whole corpus string.
tokenizer = Tokenizer()
tokenizer.fit_on_texts([data])
# Persist the fitted tokenizer so the predict step can reuse the same word
# index; `with` flushes and closes the pickle file (the original
# `pickle.dump(..., open(...))` leaked the handle).
with open('token.pkl', 'wb') as f:
    pickle.dump(tokenizer, f)
# Encode the corpus as a flat list of integer token ids.
sequence_data = tokenizer.texts_to_sequences([data])[0]
sequence_data[:15]
[1, 178, 157, 1173, 290, 4, 877, 30, 73, 3195, 4176, 30, 3196, 2163, 3197]
# Total number of token ids produced from the corpus.
len(sequence_data)
126018
# +1 because Keras Tokenizer word indices start at 1; index 0 is reserved.
vocab_size = len(tokenizer.word_index) + 1
print(vocab_size)
6736
# Sliding window of 4 consecutive token ids over the corpus: the first 3
# ids of each window are the model input, the 4th is the target word.
# A comprehension replaces the original append loop (same windows, same order).
sequences = [sequence_data[i - 3:i + 1] for i in range(3, len(sequence_data))]
print("The Length of sequences are: ", len(sequences))
sequences = np.array(sequences)
sequences[:10]
The Length of sequences are: 126015
array([[ 1, 178, 157, 1173],
[ 178, 157, 1173, 290],
[ 157, 1173, 290, 4],
[1173, 290, 4, 877],
[ 290, 4, 877, 30],
[ 4, 877, 30, 73],
[ 877, 30, 73, 3195],
[ 30, 73, 3195, 4176],
[ 73, 3195, 4176, 30],
[3195, 4176, 30, 3196]])
# Split each 4-token window into input (first 3 ids) and target (last id).
# Slicing the already-built 2-D `sequences` array is equivalent to the
# original per-row Python loop but runs vectorized in NumPy.
X = sequences[:, :3]
y = sequences[:, 3]
print("Data: ", X[:10])
print("Response: ", y[:10])
Data: [[ 1 178 157] [ 178 157 1173] [ 157 1173 290] [1173 290 4] [ 290 4 877] [ 4 877 30] [ 877 30 73] [ 30 73 3195] [ 73 3195 4176] [3195 4176 30]] Response: [1173 290 4 877 30 73 3195 4176 30 3196]
# One-hot encode the targets so they match the size-vocab_size softmax
# output layer and the categorical_crossentropy loss used below.
y = to_categorical(y, num_classes=vocab_size)
y[:5]
array([[0., 0., 0., ..., 0., 0., 0.],
[0., 0., 0., ..., 0., 0., 0.],
[0., 0., 0., ..., 0., 0., 0.],
[0., 0., 0., ..., 0., 0., 0.],
[0., 0., 0., ..., 0., 0., 0.]], dtype=float32)
# Next-word model: 10-d embedding over 3-token inputs, two stacked LSTMs,
# a ReLU dense layer, and a softmax over the whole vocabulary.
# Passing the layer list straight to Sequential is equivalent to the
# repeated add() calls.
model = Sequential([
    Embedding(vocab_size, 10, input_length=3),
    LSTM(1000, return_sequences=True),
    LSTM(1000),
    Dense(1000, activation="relu"),
    Dense(vocab_size, activation="softmax"),
])
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
embedding (Embedding) (None, 3, 10) 67360
lstm (LSTM) (None, 3, 1000) 4044000
lstm_1 (LSTM) (None, 1000) 8004000
dense (Dense) (None, 1000) 1001000
dense_1 (Dense) (None, 6736) 6742736
=================================================================
Total params: 19,859,096
Trainable params: 19,859,096
Non-trainable params: 0
_________________________________________________________________
# Render the architecture diagram to plot.png.
keras.utils.plot_model(model, to_file='plot.png', show_layer_names=True)
# Checkpoint on *training* loss — no validation split is passed to fit(),
# so the best weights are chosen on the training set only.
checkpoint = ModelCheckpoint("next_words.h5", monitor='loss', verbose=1, save_best_only=True)
model.compile(loss="categorical_crossentropy", optimizer=Adam(learning_rate=0.001))
model.fit(X, y, epochs=70, batch_size=64, callbacks=[checkpoint])
Epoch 1/70 1967/1969 [============================>.] - ETA: 0s - loss: 6.2213 Epoch 1: loss improved from inf to 6.22144, saving model to next_words.h5 1969/1969 [==============================] - 38s 15ms/step - loss: 6.2214 Epoch 2/70 1967/1969 [============================>.] - ETA: 0s - loss: 5.5928 Epoch 2: loss improved from 6.22144 to 5.59246, saving model to next_words.h5 1969/1969 [==============================] - 31s 16ms/step - loss: 5.5925 Epoch 3/70 1968/1969 [============================>.] - ETA: 0s - loss: 5.2346 Epoch 3: loss improved from 5.59246 to 5.23473, saving model to next_words.h5 1969/1969 [==============================] - 31s 16ms/step - loss: 5.2347 Epoch 4/70 1966/1969 [============================>.] - ETA: 0s - loss: 4.9941 Epoch 4: loss improved from 5.23473 to 4.99407, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 4.9941 Epoch 5/70 1968/1969 [============================>.] - ETA: 0s - loss: 4.7817 Epoch 5: loss improved from 4.99407 to 4.78190, saving model to next_words.h5 1969/1969 [==============================] - 34s 17ms/step - loss: 4.7819 Epoch 6/70 1968/1969 [============================>.] - ETA: 0s - loss: 4.5734 Epoch 6: loss improved from 4.78190 to 4.57324, saving model to next_words.h5 1969/1969 [==============================] - 31s 16ms/step - loss: 4.5732 Epoch 7/70 1966/1969 [============================>.] - ETA: 0s - loss: 4.3664 Epoch 7: loss improved from 4.57324 to 4.36656, saving model to next_words.h5 1969/1969 [==============================] - 32s 16ms/step - loss: 4.3666 Epoch 8/70 1969/1969 [==============================] - ETA: 0s - loss: 4.1558 Epoch 8: loss improved from 4.36656 to 4.15576, saving model to next_words.h5 1969/1969 [==============================] - 32s 16ms/step - loss: 4.1558 Epoch 9/70 1968/1969 [============================>.] 
- ETA: 0s - loss: 3.9444 Epoch 9: loss improved from 4.15576 to 3.94454, saving model to next_words.h5 1969/1969 [==============================] - 31s 16ms/step - loss: 3.9445 Epoch 10/70 1969/1969 [==============================] - ETA: 0s - loss: 3.7239 Epoch 10: loss improved from 3.94454 to 3.72393, saving model to next_words.h5 1969/1969 [==============================] - 31s 16ms/step - loss: 3.7239 Epoch 11/70 1969/1969 [==============================] - ETA: 0s - loss: 3.4972 Epoch 11: loss improved from 3.72393 to 3.49719, saving model to next_words.h5 1969/1969 [==============================] - 31s 16ms/step - loss: 3.4972 Epoch 12/70 1969/1969 [==============================] - ETA: 0s - loss: 3.2710 Epoch 12: loss improved from 3.49719 to 3.27099, saving model to next_words.h5 1969/1969 [==============================] - 31s 16ms/step - loss: 3.2710 Epoch 13/70 1968/1969 [============================>.] - ETA: 0s - loss: 3.0409 Epoch 13: loss improved from 3.27099 to 3.04093, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 3.0409 Epoch 14/70 1966/1969 [============================>.] - ETA: 0s - loss: 2.8093 Epoch 14: loss improved from 3.04093 to 2.80955, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 2.8095 Epoch 15/70 1966/1969 [============================>.] - ETA: 0s - loss: 2.5720 Epoch 15: loss improved from 2.80955 to 2.57212, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 2.5721 Epoch 16/70 1966/1969 [============================>.] - ETA: 0s - loss: 2.3366 Epoch 16: loss improved from 2.57212 to 2.33708, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 2.3371 Epoch 17/70 1968/1969 [============================>.] 
- ETA: 0s - loss: 2.1044 Epoch 17: loss improved from 2.33708 to 2.10462, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 2.1046 Epoch 18/70 1968/1969 [============================>.] - ETA: 0s - loss: 1.8812 Epoch 18: loss improved from 2.10462 to 1.88128, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 1.8813 Epoch 19/70 1967/1969 [============================>.] - ETA: 0s - loss: 1.6786 Epoch 19: loss improved from 1.88128 to 1.67849, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 1.6785 Epoch 20/70 1969/1969 [==============================] - ETA: 0s - loss: 1.4961 Epoch 20: loss improved from 1.67849 to 1.49605, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 1.4961 Epoch 21/70 1969/1969 [==============================] - ETA: 0s - loss: 1.3360 Epoch 21: loss improved from 1.49605 to 1.33600, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 1.3360 Epoch 22/70 1966/1969 [============================>.] - ETA: 0s - loss: 1.2089 Epoch 22: loss improved from 1.33600 to 1.20907, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 1.2091 Epoch 23/70 1968/1969 [============================>.] - ETA: 0s - loss: 1.0943 Epoch 23: loss improved from 1.20907 to 1.09452, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 1.0945 Epoch 24/70 1966/1969 [============================>.] 
- ETA: 0s - loss: 1.0109 Epoch 24: loss improved from 1.09452 to 1.01142, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 1.0114 Epoch 25/70 1969/1969 [==============================] - ETA: 0s - loss: 0.9371 Epoch 25: loss improved from 1.01142 to 0.93705, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.9371 Epoch 26/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.8803 Epoch 26: loss improved from 0.93705 to 0.88057, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.8806 Epoch 27/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.8355 Epoch 27: loss improved from 0.88057 to 0.83559, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.8356 Epoch 28/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.7963 Epoch 28: loss improved from 0.83559 to 0.79628, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.7963 Epoch 29/70 1967/1969 [============================>.] - ETA: 0s - loss: 0.7631 Epoch 29: loss improved from 0.79628 to 0.76315, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.7632 Epoch 30/70 1967/1969 [============================>.] - ETA: 0s - loss: 0.7394 Epoch 30: loss improved from 0.76315 to 0.73949, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.7395 Epoch 31/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.7143 Epoch 31: loss improved from 0.73949 to 0.71457, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.7146 Epoch 32/70 1967/1969 [============================>.] 
- ETA: 0s - loss: 0.6929 Epoch 32: loss improved from 0.71457 to 0.69305, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.6931 Epoch 33/70 1967/1969 [============================>.] - ETA: 0s - loss: 0.6733 Epoch 33: loss improved from 0.69305 to 0.67333, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.6733 Epoch 34/70 1967/1969 [============================>.] - ETA: 0s - loss: 0.6573 Epoch 34: loss improved from 0.67333 to 0.65742, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.6574 Epoch 35/70 1969/1969 [==============================] - ETA: 0s - loss: 0.6417 Epoch 35: loss improved from 0.65742 to 0.64165, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.6417 Epoch 36/70 1967/1969 [============================>.] - ETA: 0s - loss: 0.6309 Epoch 36: loss improved from 0.64165 to 0.63120, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.6312 Epoch 37/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.6164 Epoch 37: loss improved from 0.63120 to 0.61654, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.6165 Epoch 38/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.6076 Epoch 38: loss improved from 0.61654 to 0.60783, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.6078 Epoch 39/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.5935 Epoch 39: loss improved from 0.60783 to 0.59348, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5935 Epoch 40/70 1966/1969 [============================>.] 
- ETA: 0s - loss: 0.5837 Epoch 40: loss improved from 0.59348 to 0.58365, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5837 Epoch 41/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.5782 Epoch 41: loss improved from 0.58365 to 0.57811, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5781 Epoch 42/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.5717 Epoch 42: loss improved from 0.57811 to 0.57181, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5718 Epoch 43/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.5587 Epoch 43: loss improved from 0.57181 to 0.55869, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5587 Epoch 44/70 1967/1969 [============================>.] - ETA: 0s - loss: 0.5517 Epoch 44: loss improved from 0.55869 to 0.55181, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5518 Epoch 45/70 1969/1969 [==============================] - ETA: 0s - loss: 0.5479 Epoch 45: loss improved from 0.55181 to 0.54790, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5479 Epoch 46/70 1969/1969 [==============================] - ETA: 0s - loss: 0.5418 Epoch 46: loss improved from 0.54790 to 0.54185, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5418 Epoch 47/70 1968/1969 [============================>.] 
- ETA: 0s - loss: 0.5312 Epoch 47: loss improved from 0.54185 to 0.53141, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5314 Epoch 48/70 1969/1969 [==============================] - ETA: 0s - loss: 0.5283 Epoch 48: loss improved from 0.53141 to 0.52831, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5283 Epoch 49/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.5220 Epoch 49: loss improved from 0.52831 to 0.52200, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 0.5220 Epoch 50/70 1969/1969 [==============================] - ETA: 0s - loss: 0.5149 Epoch 50: loss improved from 0.52200 to 0.51486, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 0.5149 Epoch 51/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.5099 Epoch 51: loss improved from 0.51486 to 0.50998, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 0.5100 Epoch 52/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.5071 Epoch 52: loss improved from 0.50998 to 0.50714, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.5071 Epoch 53/70 1969/1969 [==============================] - ETA: 0s - loss: 0.5020 Epoch 53: loss improved from 0.50714 to 0.50196, saving model to next_words.h5 1969/1969 [==============================] - 29s 15ms/step - loss: 0.5020 Epoch 54/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.4953 Epoch 54: loss improved from 0.50196 to 0.49553, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4955 Epoch 55/70 1968/1969 [============================>.] 
- ETA: 0s - loss: 0.4922 Epoch 55: loss improved from 0.49553 to 0.49225, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4923 Epoch 56/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.4926 Epoch 56: loss did not improve from 0.49225 1969/1969 [==============================] - 29s 15ms/step - loss: 0.4928 Epoch 57/70 1969/1969 [==============================] - ETA: 0s - loss: 0.4843 Epoch 57: loss improved from 0.49225 to 0.48430, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4843 Epoch 58/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.4798 Epoch 58: loss improved from 0.48430 to 0.47989, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4799 Epoch 59/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.4815 Epoch 59: loss did not improve from 0.47989 1969/1969 [==============================] - 29s 15ms/step - loss: 0.4818 Epoch 60/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.4740 Epoch 60: loss improved from 0.47989 to 0.47393, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4739 Epoch 61/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.4702 Epoch 61: loss improved from 0.47393 to 0.47016, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4702 Epoch 62/70 1967/1969 [============================>.] - ETA: 0s - loss: 0.4713 Epoch 62: loss did not improve from 0.47016 1969/1969 [==============================] - 29s 15ms/step - loss: 0.4713 Epoch 63/70 1967/1969 [============================>.] 
- ETA: 0s - loss: 0.4651 Epoch 63: loss improved from 0.47016 to 0.46517, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4652 Epoch 64/70 1969/1969 [==============================] - ETA: 0s - loss: 0.4593 Epoch 64: loss improved from 0.46517 to 0.45933, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4593 Epoch 65/70 1969/1969 [==============================] - ETA: 0s - loss: 0.4599 Epoch 65: loss did not improve from 0.45933 1969/1969 [==============================] - 29s 15ms/step - loss: 0.4599 Epoch 66/70 1968/1969 [============================>.] - ETA: 0s - loss: 0.4577 Epoch 66: loss improved from 0.45933 to 0.45772, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4577 Epoch 67/70 1969/1969 [==============================] - ETA: 0s - loss: 0.4570 Epoch 67: loss improved from 0.45772 to 0.45697, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4570 Epoch 68/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.4499 Epoch 68: loss improved from 0.45697 to 0.45000, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4500 Epoch 69/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.4506 Epoch 69: loss did not improve from 0.45000 1969/1969 [==============================] - 29s 15ms/step - loss: 0.4507 Epoch 70/70 1966/1969 [============================>.] - ETA: 0s - loss: 0.4470 Epoch 70: loss improved from 0.45000 to 0.44713, saving model to next_words.h5 1969/1969 [==============================] - 30s 15ms/step - loss: 0.4471
<keras.callbacks.History at 0x7f42fdb86150>
# Load the model and tokenizer saved by the training cell above.
model = load_model('next_words.h5')
# `with` closes the pickle file handle (the original
# `pickle.load(open(...))` left it open).
with open('token.pkl', 'rb') as f:
    tokenizer = pickle.load(f)
def Predict_Next_Words(model, tokenizer, text):
    """Predict the single most likely next word for `text`.

    `text` is encoded with the fitted tokenizer, run through the model,
    and the argmax class index is mapped back to its vocabulary word.
    Prints and returns the predicted word ("" if no word has that index).
    """
    encoded = np.array(tokenizer.texts_to_sequences([text]))
    best_index = np.argmax(model.predict(encoded))
    # Reverse lookup in the word->index mapping; default to "" when the
    # predicted index (e.g. the reserved 0) has no vocabulary entry.
    predicted_word = next(
        (word for word, idx in tokenizer.word_index.items() if idx == best_index),
        "",
    )
    print(predicted_word)
    return predicted_word
# Interactive loop: read a line from stdin, keep its last three words as
# the context window (the model was trained on 3-word inputs), and print
# the predicted next word. Entering "0" exits.
while(True):
    text = input("Enter your line: ")
    if text == "0":
        print("Execution completed.....")
        break
    else:
        try:
            text = text.split(" ")
            # Only the final 3 words matter — that is the model's window.
            text = text[-3:]
            print(text)
            Predict_Next_Words(model, tokenizer, text)
        except Exception as e:
            # Best-effort REPL: report the error and keep prompting.
            print("Error occurred: ",e)
            continue
Enter your line: The Project Gutenberg eBook ['Project', 'Gutenberg', 'eBook'] 1/1 [==============================] - 2s 2s/step pride Enter your line: The Project Gutenberg eBook, Pride ['Gutenberg', 'eBook,', 'Pride'] 1/1 [==============================] - 1s 636ms/step and Enter your line: how can you abuse your own ['abuse', 'your', 'own'] 1/1 [==============================] - 0s 15ms/step children Enter your line: He was quite ['He', 'was', 'quite'] 1/1 [==============================] - 0s 17ms/step young Enter your line: He could not help seeing that you were about five times as ['five', 'times', 'as'] 1/1 [==============================] - 0s 16ms/step pretty Enter your line: Mrs. Hurst and her sister ['and', 'her', 'sister'] 1/1 [==============================] - 0s 16ms/step scarcely Enter your line: Elizabeth preserved as steady a ['as', 'steady', 'a'] 1/1 [==============================] - 0s 15ms/step silence Enter your line: however, it may all come to ['all', 'come', 'to'] 1/1 [==============================] - 0s 16ms/step nothing Enter your line: 0 Execution completed.....
Article: Ftl-CoV19: A Transfer Learning Approach to Detect COVID-19
Source: Saurabh et al. [2022]
English version
The article studies a transfer learning approach to detect COVID-19. Initially, it presents the context of confronting the new coronavirus worldwide since its emergence, the most common symptoms, and the risk factors that can cause the disease to worsen in certain individuals. In this context, due to the lack of sufficient data and the high level of unpredictability, a standard model is not the best option. Thus, the study proposes the use of transfer learning, based on the pre-trained VGG-16 model, focusing on chest X-ray data.
First, the article seeks to show the growth, challenges and consequences of COVID-19 in the world, emphasizing the role that artificial intelligence can play in detecting similar patterns and making predictions, including disease detection, so that greater precision can be obtained beyond the conventional tests already in use. In addition, the authors present some studies on machine learning algorithms used to analyze drugs and to help create means of containing the spread of the virus, as illustrated by recent research on detecting COVID-19 through artificial intelligence.
Thus, the approach proposed by the authors is a fine-tuning transfer-learning method for coronavirus detection (Ftl-CoV19), organized in four phases, which involve the dataset, pre-processing, training and detection. In the first phase, the data consist of chest radiographs and CT scans, with 1,281 COVID-19 and 1,481 normal diagnoses. After that, the data are pre-processed: the images are resized from 450 x 446 to 224 x 224 and screened so that blurry or annotated images can be discarded, for example, to facilitate training and validation. In training, a CNN and the pre-trained VGG16 are used together with transfer learning and fine tuning, with an 80 : 20 train/test split — 2210 images for training and 552 images for validation.
In this way, the proposed approach called Ftl-CoV19 was compared with other pre-trained models, such as ResNet50, InceptionV3 and Xception under very similar test conditions, and ended up achieving better results, with training and validation accuracy of 98.82% and 99.27%, respectively.
Portuguese version
O artigo estuda uma abordagem de transfer learning para detectar COVID-19. Inicialmente, é apresentada a contextualização sobre o enfrentamento do novo coronavírus no mundo desde o seu surgimento, sintomas mais comuns e fatores de risco que podem provocar o agravamento da doença em determinados indivíduos. Dentro desse contexto, em decorrência da ausência de dados suficientes e do alto nível de imprevisibilidade, um modelo padrão acaba não sendo a melhor opção. Assim, o estudo propõe a utilização de transfer learning, a partir do modelo pré-treinado VGG-16, tendo como base raios-X de tórax.
Primeiramente, o artigo busca mostrar o crescimento, os desafios e as consequências da COVID-19 no mundo, enfatizando o papel que a inteligência artificial possui para detectar padrões semelhantes e realizar predições, incluindo atuar na detecção de doenças, de forma que se possa obter uma maior precisão para além dos testes convencionais já utilizados. Além disso, os autores apresentam alguns estudos realizados a respeito de um algoritmo de aprendizado de máquina para analisar medicamentos e colaborar na criação de meios para conter a propagação do vírus, por exemplo, tal como explanam pesquisas recentes realizadas para detectar COVID-19 por meio de inteligência artificial.
Com isso, a abordagem proposta pelos autores trata-se de uma técnica de ajuste fino de aprendizado de transferência para coronavírus (Ftl-CoV19), a partir de quatro fases, as quais envolvem o conjunto de dados, o pré-processamento, o treinamento e a detecção. Na primeira fase, os dados consistem em radiografias de tórax e tomografias computadorizadas, com a presença de 1.281 diagnósticos de COVID-19 e 1.481 diagnósticos normais. Em seguida, tem-se o pré-processamento de dados, em que as imagens são modeladas para o tamanho desejado, indo de 450 x 446 para 224 x 224, e selecionadas de forma que se possa descartar imagens borradas ou com anotações, por exemplo, para facilitar o treinamento e a validação dos dados. Já no treinamento, usa-se CNN e o VGG16 pré-treinado em conjunto com transfer learning e fine tuning, cuja proporção utilizada foi 80 : 20 para treinamento e teste com 2210 imagens para treinar e 552 imagens para validação.
Dessa forma, a abordagem proposta chamada de Ftl-CoV19 foi comparada com outros modelos pré-treinados, como ResNet50, InceptionV3 e Xception sob condições bastante parecidas de teste, e acabou alcançando melhores resultados, com precisão de treinamento e validação de 98,82% e 99,27%, respectivamente.
Source: Kaggle: Pneumonia Detection using CNN
The dataset is organized into 3 folders (train, test, val) and contains subfolders for each image category (Pneumonia/Normal). There are 5,863 X-Ray images (JPEG) and 2 categories (Pneumonia/Normal). Chest X-ray images (anterior-posterior) were selected from retrospective cohorts of pediatric patients of one to five years old from Guangzhou Women and Children’s Medical Center, Guangzhou. All chest X-ray imaging was performed as part of patients’ routine clinical care. For the analysis of chest x-ray images, all chest radiographs were initially screened for quality control by removing all low quality or unreadable scans. The diagnoses for the images were then graded by two expert physicians before being cleared for training the AI system. In order to account for any grading errors, the evaluation set was also checked by a third expert.
# Mount Google Drive so the chest X-ray dataset folders are reachable
# under /content/drive (Colab-only; prompts for authorization).
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
# Class names match the dataset's subfolder names; a class's index in this
# list is its integer label (0 = PNEUMONIA, 1 = NORMAL).
labels = ['PNEUMONIA', 'NORMAL']
# Side length (pixels) that every X-ray is resized to before training.
img_size = 150
def get_training_data(data_dir):
    """Load grayscale X-ray images from *data_dir* as (image, label) pairs.

    Expects one subfolder per entry in the module-level ``labels`` list
    (PNEUMONIA/, NORMAL/). Each image is read in grayscale and resized to
    ``img_size`` x ``img_size``.

    Returns an object ndarray of [resized_image, class_index] pairs.
    """
    data = []
    for label in labels:
        path = os.path.join(data_dir, label)
        class_num = labels.index(label)
        for img in os.listdir(path):
            try:
                img_arr = cv2.imread(os.path.join(path, img), cv2.IMREAD_GRAYSCALE)
                if img_arr is None:
                    # cv2.imread signals unreadable files by returning None
                    # (no exception), which previously surfaced as a cryptic
                    # cv2.resize error below.
                    print("Skipping unreadable image:", img)
                    continue
                resized_arr = cv2.resize(img_arr, (img_size, img_size))  # Reshaping images to preferred size
                data.append([resized_arr, class_num])
            except Exception as e:
                print(e)
    # dtype=object is required for ragged [image, int] pairs; without it
    # NumPy emits a VisibleDeprecationWarning (and newer versions raise).
    return np.array(data, dtype=object)
# Load the pre-split train/test/val folders from Google Drive (I/O-heavy;
# each call walks a directory tree and decodes every JPEG).
train = get_training_data('/content/drive/MyDrive/artificial_intelligence/chest_xray/train/')
test = get_training_data('/content/drive/MyDrive/artificial_intelligence/chest_xray/test/')
val = get_training_data('/content/drive/MyDrive/artificial_intelligence/chest_xray/val/')
/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:15: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray. from ipykernel import kernelapp as app
# Count class occurrences in the training set (label 0 = Pneumonia,
# label 1 = Normal, per the `labels` list order).
l = ["Pneumonia" if pair[1] == 0 else "Normal" for pair in train]
sns.set_style('darkgrid')
# Pass the data via the `x=` keyword: positional use is deprecated in
# seaborn 0.11 and an error from 0.12 (the FutureWarning seen in the log).
sns.countplot(x=l)
/usr/local/lib/python3.7/dist-packages/seaborn/_decorators.py:43: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation. FutureWarning
<matplotlib.axes._subplots.AxesSubplot at 0x7ff05c3449d0>
The classes are imbalanced (pneumonia cases outnumber normal ones). We will mitigate this during training with data augmentation.
# Preview one sample from each class: the first item of `train` belongs to
# class 0 and the last to class 1, since classes were loaded sequentially.
for sample in (train[0], train[-1]):
    plt.figure(figsize=(5, 5))
    plt.imshow(sample[0], cmap='gray')
    plt.title(labels[sample[1]])
Text(0.5, 1.0, 'NORMAL')
# Split each (feature, label) pair into parallel feature/label lists,
# one pair of lists per dataset split.
x_train = [feature for feature, _ in train]
y_train = [label for _, label in train]
x_test = [feature for feature, _ in test]
y_test = [label for _, label in test]
x_val = [feature for feature, _ in val]
y_val = [label for _, label in val]
# Scale pixel values from [0, 255] into [0, 1] and add the trailing
# single-channel axis expected by Conv2D: (N, img_size, img_size, 1).
x_train = (np.array(x_train) / 255).reshape(-1, img_size, img_size, 1)
y_train = np.array(y_train)
x_val = (np.array(x_val) / 255).reshape(-1, img_size, img_size, 1)
y_val = np.array(y_val)
x_test = (np.array(x_test) / 255).reshape(-1, img_size, img_size, 1)
y_test = np.array(y_test)
# Augmentation pipeline: random rotations, zooms, shifts and horizontal
# flips applied on the fly at training time.
# NOTE(review): augmentation adds variety but does not by itself change the
# class distribution — confirm this is what "handling the imbalance" means.
datagen = ImageDataGenerator(
    featurewise_center=False,  # set input mean to 0 over the dataset
    samplewise_center=False,  # set each sample mean to 0
    featurewise_std_normalization=False,  # divide inputs by std of the dataset
    samplewise_std_normalization=False,  # divide each input by its std
    zca_whitening=False,  # apply ZCA whitening
    rotation_range = 30,  # randomly rotate images in the range (degrees, 0 to 180)
    zoom_range = 0.2,  # randomly zoom image
    width_shift_range=0.1,  # randomly shift images horizontally (fraction of total width)
    height_shift_range=0.1,  # randomly shift images vertically (fraction of total height)
    horizontal_flip = True,  # randomly flip images horizontally
    vertical_flip=False)  # do not flip vertically (X-rays have a fixed orientation)
# fit() is only needed for the featurewise statistics, all disabled above,
# but it is harmless here.
datagen.fit(x_train)
With the data augmentation generator configured, we next define the CNN model that will be trained on the augmented batches:
# CNN binary classifier: five Conv/BN/MaxPool stages of increasing width
# (32 -> 64 -> 64 -> 128 -> 256), then a dense head ending in a single
# sigmoid unit (pneumonia vs. normal).
model = Sequential([
    Conv2D(32, (3, 3), strides=1, padding='same', activation='relu', input_shape=(150, 150, 1)),
    BatchNormalization(),
    MaxPool2D((2, 2), strides=2, padding='same'),
    Conv2D(64, (3, 3), strides=1, padding='same', activation='relu'),
    Dropout(0.1),
    BatchNormalization(),
    MaxPool2D((2, 2), strides=2, padding='same'),
    Conv2D(64, (3, 3), strides=1, padding='same', activation='relu'),
    BatchNormalization(),
    MaxPool2D((2, 2), strides=2, padding='same'),
    Conv2D(128, (3, 3), strides=1, padding='same', activation='relu'),
    Dropout(0.2),
    BatchNormalization(),
    MaxPool2D((2, 2), strides=2, padding='same'),
    Conv2D(256, (3, 3), strides=1, padding='same', activation='relu'),
    Dropout(0.2),
    BatchNormalization(),
    MaxPool2D((2, 2), strides=2, padding='same'),
    Flatten(),
    Dense(units=128, activation='relu'),
    Dropout(0.2),
    Dense(units=1, activation='sigmoid'),
])
model.compile(optimizer="rmsprop", loss='binary_crossentropy', metrics=['accuracy'])
model.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_5 (Conv2D) (None, 150, 150, 32) 320
batch_normalization_5 (Batc (None, 150, 150, 32) 128
hNormalization)
max_pooling2d_5 (MaxPooling (None, 75, 75, 32) 0
2D)
conv2d_6 (Conv2D) (None, 75, 75, 64) 18496
dropout_4 (Dropout) (None, 75, 75, 64) 0
batch_normalization_6 (Batc (None, 75, 75, 64) 256
hNormalization)
max_pooling2d_6 (MaxPooling (None, 38, 38, 64) 0
2D)
conv2d_7 (Conv2D) (None, 38, 38, 64) 36928
batch_normalization_7 (Batc (None, 38, 38, 64) 256
hNormalization)
max_pooling2d_7 (MaxPooling (None, 19, 19, 64) 0
2D)
conv2d_8 (Conv2D) (None, 19, 19, 128) 73856
dropout_5 (Dropout) (None, 19, 19, 128) 0
batch_normalization_8 (Batc (None, 19, 19, 128) 512
hNormalization)
max_pooling2d_8 (MaxPooling (None, 10, 10, 128) 0
2D)
conv2d_9 (Conv2D) (None, 10, 10, 256) 295168
dropout_6 (Dropout) (None, 10, 10, 256) 0
batch_normalization_9 (Batc (None, 10, 10, 256) 1024
hNormalization)
max_pooling2d_9 (MaxPooling (None, 5, 5, 256) 0
2D)
flatten_1 (Flatten) (None, 6400) 0
dense_2 (Dense) (None, 128) 819328
dropout_7 (Dropout) (None, 128) 0
dense_3 (Dense) (None, 1) 129
=================================================================
Total params: 1,246,401
Trainable params: 1,245,313
Non-trainable params: 1,088
_________________________________________________________________
# Shrink the learning rate to 30% after 2 epochs without val_accuracy gains.
learning_rate_reduction = ReduceLROnPlateau(monitor='val_accuracy', patience = 2, verbose=1,factor=0.3, min_lr=0.000001)
# Train on augmented batches for 20 epochs.
# NOTE(review): validation data is ALSO drawn through the augmenter here —
# validating on augmented images is unusual; confirm this is intended.
history = model.fit(datagen.flow(x_train,y_train, batch_size = 32) ,epochs = 20 , validation_data = datagen.flow(x_val, y_val) ,callbacks = [learning_rate_reduction])
Epoch 1/20 67/67 [==============================] - 7s 77ms/step - loss: 0.8254 - accuracy: 0.8199 - val_loss: 12.5577 - val_accuracy: 0.5000 - lr: 0.0010 Epoch 2/20 67/67 [==============================] - 5s 74ms/step - loss: 0.3227 - accuracy: 0.8669 - val_loss: 51.1323 - val_accuracy: 0.5000 - lr: 0.0010 Epoch 3/20 67/67 [==============================] - ETA: 0s - loss: 0.2573 - accuracy: 0.8886 Epoch 3: ReduceLROnPlateau reducing learning rate to 0.0003000000142492354. 67/67 [==============================] - 5s 74ms/step - loss: 0.2573 - accuracy: 0.8886 - val_loss: 104.3453 - val_accuracy: 0.5000 - lr: 0.0010 Epoch 4/20 67/67 [==============================] - 5s 73ms/step - loss: 0.1527 - accuracy: 0.9403 - val_loss: 64.3176 - val_accuracy: 0.5000 - lr: 3.0000e-04 Epoch 5/20 67/67 [==============================] - ETA: 0s - loss: 0.1355 - accuracy: 0.9417 Epoch 5: ReduceLROnPlateau reducing learning rate to 9.000000427477062e-05. 67/67 [==============================] - 5s 74ms/step - loss: 0.1355 - accuracy: 0.9417 - val_loss: 59.4602 - val_accuracy: 0.5000 - lr: 3.0000e-04 Epoch 6/20 67/67 [==============================] - 5s 74ms/step - loss: 0.1108 - accuracy: 0.9525 - val_loss: 48.3700 - val_accuracy: 0.5000 - lr: 9.0000e-05 Epoch 7/20 67/67 [==============================] - ETA: 0s - loss: 0.1058 - accuracy: 0.9549 Epoch 7: ReduceLROnPlateau reducing learning rate to 2.700000040931627e-05. 67/67 [==============================] - 5s 72ms/step - loss: 0.1058 - accuracy: 0.9549 - val_loss: 48.9783 - val_accuracy: 0.5000 - lr: 9.0000e-05 Epoch 8/20 67/67 [==============================] - 5s 72ms/step - loss: 0.0860 - accuracy: 0.9699 - val_loss: 42.8088 - val_accuracy: 0.5000 - lr: 2.7000e-05 Epoch 9/20 67/67 [==============================] - ETA: 0s - loss: 0.0949 - accuracy: 0.9577 Epoch 9: ReduceLROnPlateau reducing learning rate to 8.100000013655517e-06. 
67/67 [==============================] - 5s 74ms/step - loss: 0.0949 - accuracy: 0.9577 - val_loss: 39.0893 - val_accuracy: 0.5000 - lr: 2.7000e-05 Epoch 10/20 67/67 [==============================] - 5s 73ms/step - loss: 0.0911 - accuracy: 0.9629 - val_loss: 17.0112 - val_accuracy: 0.5000 - lr: 8.1000e-06 Epoch 11/20 67/67 [==============================] - 5s 73ms/step - loss: 0.0916 - accuracy: 0.9624 - val_loss: 4.8175 - val_accuracy: 0.5859 - lr: 8.1000e-06 Epoch 12/20 67/67 [==============================] - 5s 73ms/step - loss: 0.0925 - accuracy: 0.9638 - val_loss: 1.8769 - val_accuracy: 0.7803 - lr: 8.1000e-06 Epoch 13/20 67/67 [==============================] - 5s 73ms/step - loss: 0.0844 - accuracy: 0.9661 - val_loss: 0.7100 - val_accuracy: 0.9167 - lr: 8.1000e-06 Epoch 14/20 67/67 [==============================] - 6s 87ms/step - loss: 0.0918 - accuracy: 0.9661 - val_loss: 0.7608 - val_accuracy: 0.9318 - lr: 8.1000e-06 Epoch 15/20 67/67 [==============================] - 5s 74ms/step - loss: 0.0840 - accuracy: 0.9709 - val_loss: 0.6393 - val_accuracy: 0.9167 - lr: 8.1000e-06 Epoch 16/20 67/67 [==============================] - ETA: 0s - loss: 0.0903 - accuracy: 0.9643 Epoch 16: ReduceLROnPlateau reducing learning rate to 2.429999949526973e-06. 67/67 [==============================] - 5s 72ms/step - loss: 0.0903 - accuracy: 0.9643 - val_loss: 0.9324 - val_accuracy: 0.9293 - lr: 8.1000e-06 Epoch 17/20 67/67 [==============================] - 5s 73ms/step - loss: 0.0858 - accuracy: 0.9676 - val_loss: 0.6854 - val_accuracy: 0.9293 - lr: 2.4300e-06 Epoch 18/20 67/67 [==============================] - ETA: 0s - loss: 0.0901 - accuracy: 0.9694 Epoch 18: ReduceLROnPlateau reducing learning rate to 1e-06. 
67/67 [==============================] - 5s 74ms/step - loss: 0.0901 - accuracy: 0.9694 - val_loss: 0.6776 - val_accuracy: 0.9116 - lr: 2.4300e-06 Epoch 19/20 67/67 [==============================] - 5s 73ms/step - loss: 0.0892 - accuracy: 0.9643 - val_loss: 0.7045 - val_accuracy: 0.9091 - lr: 1.0000e-06 Epoch 20/20 67/67 [==============================] - 5s 73ms/step - loss: 0.0849 - accuracy: 0.9661 - val_loss: 0.7006 - val_accuracy: 0.9343 - lr: 1.0000e-06
# Evaluate once and reuse the (loss, accuracy) pair — the original called
# model.evaluate twice, running the full test set through the model twice.
test_loss, test_acc = model.evaluate(x_test, y_test)
print("Loss of the model is - ", test_loss)
print("Accuracy of the model is - ", test_acc * 100, "%")
20/20 [==============================] - 0s 15ms/step - loss: 0.6472 - accuracy: 0.8590 Loss of the model is - 0.647210419178009 20/20 [==============================] - 0s 12ms/step - loss: 0.6472 - accuracy: 0.8590 Accuracy of the model is - 85.89743375778198 %
# Plot accuracy and loss curves side by side. The epoch axis is derived
# from the history itself rather than hard-coding 20, so the plot stays
# correct if the epoch count changes or training stops early.
train_acc = history.history['accuracy']
train_loss = history.history['loss']
val_acc = history.history['val_accuracy']
val_loss = history.history['val_loss']
epochs = [i for i in range(len(train_acc))]
fig, ax = plt.subplots(1, 2)
fig.set_size_inches(20, 10)
ax[0].plot(epochs, train_acc, 'go-', label='Training Accuracy')
ax[0].plot(epochs, val_acc, 'ro-', label='Validation Accuracy')
ax[0].set_title('Training & Validation Accuracy')
ax[0].legend()
ax[0].set_xlabel("Epochs")
ax[0].set_ylabel("Accuracy")
ax[1].plot(epochs, train_loss, 'g-o', label='Training Loss')
ax[1].plot(epochs, val_loss, 'r-o', label='Validation Loss')
# Title fixed: this panel plots training/validation loss, not test accuracy.
ax[1].set_title('Training & Validation Loss')
ax[1].legend()
ax[1].set_xlabel("Epochs")
ax[1].set_ylabel("Training & Validation Loss")
plt.show()
# Threshold the sigmoid outputs at 0.5 and flatten to a 1-D label vector.
raw_scores = model.predict(x_test)
predictions = np.where(raw_scores > 0.5, 1, 0).reshape(-1)
predictions[:15]
20/20 [==============================] - 0s 12ms/step
array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
# Per-class precision/recall/F1 on the held-out test set.
report = classification_report(y_test, predictions, target_names = ['Pneumonia (Class 0)','Normal (Class 1)'])
print(report)
precision recall f1-score support
Pneumonia (Class 0) 0.86 0.93 0.89 390
Normal (Class 1) 0.86 0.74 0.80 234
accuracy 0.86 624
macro avg 0.86 0.84 0.84 624
weighted avg 0.86 0.86 0.86 624
# Raw confusion matrix: rows are true classes, columns predicted classes.
cm = confusion_matrix(y_test,predictions)
cm
array([[363, 27],
[ 61, 173]])
# Render the confusion matrix as an annotated heatmap with class names.
cm = pd.DataFrame(cm, index=['0', '1'], columns=['0', '1'])
plt.figure(figsize=(10, 10))
sns.heatmap(
    cm,
    cmap="Blues",
    linecolor='black',
    linewidth=1,
    annot=True,
    fmt='',
    xticklabels=labels,
    yticklabels=labels,
)
<matplotlib.axes._subplots.AxesSubplot at 0x7ff05c3a6b10>
# Indices of test samples the model classified correctly / incorrectly.
correct = np.nonzero(predictions == y_test)[0]
incorrect = np.nonzero(predictions != y_test)[0]
# Preview up to six correctly classified X-rays in a 3x2 grid.
for i, c in enumerate(correct[:6]):
    plt.subplot(3, 2, i + 1)
    plt.xticks([])
    plt.yticks([])
    plt.imshow(x_test[c].reshape(150, 150), cmap="gray", interpolation='none')
    plt.title("Predicted Class {},Actual Class {}".format(predictions[c], y_test[c]))
    plt.tight_layout()
# Preview up to six misclassified X-rays in a 3x2 grid.
for i, c in enumerate(incorrect[:6]):
    plt.subplot(3, 2, i + 1)
    plt.xticks([])
    plt.yticks([])
    plt.imshow(x_test[c].reshape(150, 150), cmap="gray", interpolation='none')
    plt.title("Predicted Class {},Actual Class {}".format(predictions[c], y_test[c]))
    plt.tight_layout()
# Load CIFAR10 data
# NOTE(review): this overwrites the X_train/y_train names from the pneumonia
# section above — fine in notebook flow, but confirm nothing later needs them.
(X_train, y_train), (_, _) = cifar10.load_data()
# Select a single class images (birds); the filter keeps only label value 2
X_train = X_train[y_train.flatten() == 2]
# Input shape of the GAN's real/generated images
img_rows = 32
img_cols = 32
channels = 3
img_shape = (img_rows, img_cols, channels)
# Dimensionality of the generator's input noise vector
latent_dim = 100
def build_generator():
    """Build the GAN generator: latent noise vector -> 32x32x3 tanh image."""
    net = Sequential()
    # Project the noise into an 8x8x128 feature map.
    net.add(Dense(128 * 8 * 8, activation="relu", input_dim=latent_dim))
    net.add(Reshape((8, 8, 128)))
    # Upsample 8x8 -> 16x16 and refine with a conv block.
    net.add(UpSampling2D())
    net.add(Conv2D(128, kernel_size=3, padding="same"))
    net.add(BatchNormalization(momentum=0.8))
    net.add(Activation("relu"))
    # Upsample 16x16 -> 32x32 and refine again.
    net.add(UpSampling2D())
    net.add(Conv2D(64, kernel_size=3, padding="same"))
    net.add(BatchNormalization(momentum=0.8))
    net.add(Activation("relu"))
    # Final RGB image with tanh outputs in [-1, 1].
    net.add(Conv2D(channels, kernel_size=3, padding="same"))
    net.add(Activation("tanh"))
    net.summary()
    latent_input = Input(shape=(latent_dim,))
    generated = net(latent_input)
    return Model(latent_input, generated)
def build_discriminator():
    """Build the GAN discriminator: 32x32x3 image -> real/fake probability."""
    net = Sequential()
    # First conv stage intentionally has no batch normalization.
    net.add(Conv2D(32, kernel_size=3, strides=2, input_shape=img_shape, padding="same"))
    net.add(LeakyReLU(alpha=0.2))
    net.add(Dropout(0.25))
    net.add(Conv2D(64, kernel_size=3, strides=2, padding="same"))
    net.add(ZeroPadding2D(padding=((0, 1), (0, 1))))
    net.add(BatchNormalization(momentum=0.8))
    net.add(LeakyReLU(alpha=0.2))
    net.add(Dropout(0.25))
    net.add(Conv2D(128, kernel_size=3, strides=2, padding="same"))
    net.add(BatchNormalization(momentum=0.8))
    net.add(LeakyReLU(alpha=0.2))
    net.add(Dropout(0.25))
    net.add(Conv2D(256, kernel_size=3, strides=1, padding="same"))
    net.add(BatchNormalization(momentum=0.8))
    net.add(LeakyReLU(alpha=0.2))
    net.add(Dropout(0.25))
    # Collapse the feature map to a single validity score.
    net.add(Flatten())
    net.add(Dense(1, activation='sigmoid'))
    net.summary()
    image_input = Input(shape=img_shape)
    validity = net(image_input)
    return Model(image_input, validity)
# Build and compile the discriminator
discriminator = build_discriminator()
discriminator.compile(loss='binary_crossentropy',
    optimizer=Adam(0.0002, 0.5),  # lr=2e-4, beta_1=0.5
    metrics=['accuracy'])
# Build the generator
generator = build_generator()
# The generator takes noise as input and generates imgs
z = Input(shape=(latent_dim,))
img = generator(z)
# For the combined model we will only train the generator.
# This freeze takes effect in the `combined` graph compiled below; the
# standalone `discriminator` was already compiled trainable above.
discriminator.trainable = False
# The discriminator takes generated images as input and determines validity
valid = discriminator(img)
# The combined model (stacked generator and discriminator)
# Trains the generator to fool the discriminator
combined = Model(z, valid)
combined.compile(loss='binary_crossentropy', optimizer=Adam(0.0002, 0.5))
Model: "sequential_4"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_14 (Conv2D) (None, 16, 16, 32) 896
leaky_re_lu_8 (LeakyReLU) (None, 16, 16, 32) 0
dropout_8 (Dropout) (None, 16, 16, 32) 0
conv2d_15 (Conv2D) (None, 8, 8, 64) 18496
zero_padding2d_2 (ZeroPaddi (None, 9, 9, 64) 0
ng2D)
batch_normalization_10 (Bat (None, 9, 9, 64) 256
chNormalization)
leaky_re_lu_9 (LeakyReLU) (None, 9, 9, 64) 0
dropout_9 (Dropout) (None, 9, 9, 64) 0
conv2d_16 (Conv2D) (None, 5, 5, 128) 73856
batch_normalization_11 (Bat (None, 5, 5, 128) 512
chNormalization)
leaky_re_lu_10 (LeakyReLU) (None, 5, 5, 128) 0
dropout_10 (Dropout) (None, 5, 5, 128) 0
conv2d_17 (Conv2D) (None, 5, 5, 256) 295168
batch_normalization_12 (Bat (None, 5, 5, 256) 1024
chNormalization)
leaky_re_lu_11 (LeakyReLU) (None, 5, 5, 256) 0
dropout_11 (Dropout) (None, 5, 5, 256) 0
flatten_2 (Flatten) (None, 6400) 0
dense_4 (Dense) (None, 1) 6401
=================================================================
Total params: 396,609
Trainable params: 395,713
Non-trainable params: 896
_________________________________________________________________
Model: "sequential_5"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense_5 (Dense) (None, 8192) 827392
reshape_2 (Reshape) (None, 8, 8, 128) 0
up_sampling2d_4 (UpSampling (None, 16, 16, 128) 0
2D)
conv2d_18 (Conv2D) (None, 16, 16, 128) 147584
batch_normalization_13 (Bat (None, 16, 16, 128) 512
chNormalization)
activation_6 (Activation) (None, 16, 16, 128) 0
up_sampling2d_5 (UpSampling (None, 32, 32, 128) 0
2D)
conv2d_19 (Conv2D) (None, 32, 32, 64) 73792
batch_normalization_14 (Bat (None, 32, 32, 64) 256
chNormalization)
activation_7 (Activation) (None, 32, 32, 64) 0
conv2d_20 (Conv2D) (None, 32, 32, 3) 1731
activation_8 (Activation) (None, 32, 32, 3) 0
=================================================================
Total params: 1,051,267
Trainable params: 1,050,883
Non-trainable params: 384
_________________________________________________________________
def show_imgs(epoch):
    """Plot a 4x4 grid of generator samples.

    The *epoch* argument was previously accepted but never used; it is now
    shown in the figure title so each displayed grid is identifiable.
    """
    r, c = 4, 4
    noise = np.random.normal(0, 1, (r * c, latent_dim))
    gen_imgs = generator.predict(noise)
    # Generator outputs tanh values in [-1, 1]; rescale to [0, 1] for imshow.
    gen_imgs = 0.5 * gen_imgs + 0.5
    fig, axs = plt.subplots(r, c)
    fig.suptitle("Epoch {}".format(epoch))  # fix: actually use the epoch argument
    cnt = 0
    for i in range(r):
        for j in range(c):
            axs[i, j].imshow(gen_imgs[cnt, :, :, ])
            axs[i, j].axis('off')
            cnt += 1
    plt.show()
    # Close this specific figure to release its memory between calls.
    plt.close(fig)
def show_losses(losses):
    """Plot discriminator and generator loss curves from (d_loss, g_loss) pairs."""
    curves = np.array(losses)
    fig, ax = plt.subplots()
    plt.plot(curves[:, 0], label='Discriminator')
    plt.plot(curves[:, 1], label='Generator')
    plt.title("Training Losses")
    plt.legend()
    plt.show()
# GAN training hyperparameters.
epochs = 3000
batch_size = 32
# NOTE(review): display_interval exceeds `epochs`, so generated samples are
# only ever shown at epoch 0 — confirm whether a smaller interval was meant.
display_interval = 5000
losses = []
# Normalize pixels from [0, 255] to [-1, 1] to match the tanh generator output.
X_train = X_train / 127.5 - 1.
# Adversarial ground truths with noisy (one-sided smoothed) labels.
valid = np.ones((batch_size, 1))
# Fix: subtract the noise so "real" targets stay within [0.95, 1] — the
# original added it, pushing targets above 1, which is outside the valid
# range for sigmoid binary cross-entropy.
valid -= 0.05 * np.random.random(valid.shape)
fake = np.zeros((batch_size, 1))
fake += 0.05 * np.random.random(fake.shape)
# Main GAN loop: each epoch performs one discriminator update on a mixed
# real/fake batch, then one generator update through the combined model.
for epoch in range(epochs):
    # --- Train Discriminator ---
    # Select a random batch of real images
    idx = np.random.randint(0, X_train.shape[0], batch_size)
    imgs = X_train[idx]
    # Sample noise and generate a batch of new images
    noise = np.random.normal(0, 1, (batch_size, latent_dim))
    gen_imgs = generator.predict(noise)
    # Train the discriminator (real classified as ones and generated as zeros)
    d_loss_real = discriminator.train_on_batch(imgs, valid)
    d_loss_fake = discriminator.train_on_batch(gen_imgs, fake)
    d_loss = 0.5 * np.add(d_loss_real, d_loss_fake)
    # --- Train Generator ---
    # Train the generator (wants discriminator to mistake images as real);
    # the same noise batch is reused, and the discriminator is frozen inside
    # `combined`, so only generator weights move here.
    g_loss = combined.train_on_batch(noise, valid)
    # Print the progress
    # NOTE(review): with epochs=3000 this % 5000 check only fires at epoch 0.
    if epoch % 5000 == 0:
        print("%d [D loss: %f] [G loss: %f]" % (epoch, d_loss[0], g_loss))
    # Record losses every 1000 epochs for show_losses()
    if epoch % 1000 == 0:
        losses.append((d_loss[0], g_loss))
    # Display a grid of generated samples (see display_interval note above)
    if epoch % display_interval == 0:
        show_imgs(epoch)
1/1 [==============================] - 0s 85ms/step 0 [D loss: 1.327108] [G loss: 0.599327] 1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 32ms/step 1/1 [==============================] - 0s 39ms/step 1/1 [==============================] - 0s 34ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 28ms/step 1/1 [==============================] - 0s 31ms/step 1/1 [==============================] - 0s 50ms/step 1/1 [==============================] - 0s 31ms/step 1/1 [==============================] - 0s 28ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 28ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 29ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 20ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 19ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 27ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 20ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 27ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 65ms/step 1/1 [==============================] - 0s 54ms/step 1/1 [==============================] - 0s 46ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 20ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 60ms/step 1/1 [==============================] - 0s 41ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 62ms/step 1/1 [==============================] - 0s 45ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 23ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 24ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 20ms/step 1/1 [==============================] - 0s 25ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 24ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 24ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 
[==============================] - 0s 18ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 24ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 25ms/step 1/1 [==============================] - 0s 30ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 37ms/step 1/1 [==============================] - 0s 72ms/step 1/1 [==============================] - 0s 47ms/step 1/1 [==============================] - 0s 26ms/step 1/1 [==============================] - 0s 36ms/step 1/1 [==============================] - 0s 41ms/step 1/1 [==============================] - 0s 35ms/step 1/1 [==============================] - 0s 69ms/step 1/1 [==============================] - 0s 53ms/step 1/1 [==============================] - 0s 91ms/step 1/1 [==============================] - 0s 28ms/step 1/1 [==============================] - 0s 49ms/step 1/1 [==============================] - 0s 58ms/step 1/1 [==============================] - 0s 62ms/step 1/1 [==============================] - 0s 54ms/step 1/1 [==============================] - 0s 67ms/step 1/1 [==============================] - 0s 60ms/step 1/1 [==============================] - 0s 69ms/step 1/1 [==============================] - 0s 73ms/step 1/1 [==============================] - 0s 96ms/step 1/1 [==============================] - 0s 83ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 26ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 26ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 40ms/step 1/1 [==============================] - 0s 26ms/step 1/1 [==============================] - 0s 32ms/step 1/1 [==============================] - 0s 35ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 46ms/step 1/1 [==============================] - 0s 54ms/step 1/1 [==============================] - 0s 63ms/step 1/1 [==============================] - 0s 71ms/step 1/1 [==============================] - 0s 56ms/step 1/1 [==============================] - 0s 29ms/step 1/1 [==============================] - 0s 67ms/step 1/1 [==============================] - 0s 67ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 55ms/step 1/1 [==============================] - 0s 53ms/step 1/1 [==============================] - 0s 85ms/step 1/1 [==============================] - 0s 36ms/step 1/1 [==============================] - 0s 27ms/step 1/1 [==============================] - 0s 32ms/step 1/1 [==============================] - 0s 25ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 25ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 34ms/step 1/1 
[==============================] - 0s 24ms/step 1/1 [==============================] - 0s 62ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 34ms/step 1/1 [==============================] - 0s 50ms/step 1/1 [==============================] - 0s 39ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 40ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 111ms/step 1/1 [==============================] - 0s 63ms/step 1/1 [==============================] - 0s 97ms/step 1/1 [==============================] - 0s 60ms/step 1/1 [==============================] - 0s 70ms/step 1/1 [==============================] - 0s 28ms/step 1/1 [==============================] - 0s 63ms/step 1/1 [==============================] - 0s 38ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 32ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 28ms/step 1/1 [==============================] - 0s 33ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 55ms/step 1/1 [==============================] - 0s 62ms/step 1/1 [==============================] - 0s 79ms/step 1/1 [==============================] - 0s 63ms/step 1/1 [==============================] - 0s 60ms/step 1/1 [==============================] - 0s 66ms/step 1/1 
[==============================] - 0s 59ms/step 1/1 [==============================] - 0s 32ms/step 1/1 [==============================] - 0s 54ms/step 1/1 [==============================] - 0s 59ms/step 1/1 [==============================] - 0s 37ms/step 1/1 [==============================] - 0s 26ms/step 1/1 [==============================] - 0s 29ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 24ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 31ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 34ms/step 1/1 [==============================] - 0s 37ms/step 1/1 [==============================] - 0s 58ms/step 1/1 [==============================] - 0s 37ms/step 1/1 [==============================] - 0s 52ms/step 1/1 [==============================] - 0s 70ms/step 1/1 [==============================] - 0s 59ms/step 1/1 [==============================] - 0s 59ms/step 1/1 [==============================] - 0s 32ms/step 1/1 [==============================] - 0s 52ms/step 1/1 [==============================] - 0s 47ms/step 1/1 [==============================] - 0s 38ms/step 1/1 [==============================] - 0s 60ms/step 1/1 [==============================] - 0s 29ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 50ms/step 1/1 [==============================] - 0s 57ms/step 1/1 [==============================] - 0s 41ms/step 1/1 [==============================] - 0s 24ms/step 1/1 [==============================] - 0s 35ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 31ms/step 1/1 [==============================] - 0s 33ms/step 1/1 [==============================] - 0s 26ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 32ms/step 1/1 [==============================] - 0s 40ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 28ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 74ms/step 1/1 [==============================] - 0s 78ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 
[==============================] - 0s 23ms/step 1/1 [==============================] - 0s 35ms/step 1/1 [==============================] - 0s 70ms/step 1/1 [==============================] - 0s 72ms/step 1/1 [==============================] - 0s 64ms/step 1/1 [==============================] - 0s 48ms/step 1/1 [==============================] - 0s 52ms/step 1/1 [==============================] - 0s 70ms/step 1/1 [==============================] - 0s 36ms/step 1/1 [==============================] - 0s 66ms/step 1/1 [==============================] - 0s 99ms/step 1/1 [==============================] - 0s 76ms/step 1/1 [==============================] - 0s 70ms/step 1/1 [==============================] - 0s 65ms/step 1/1 [==============================] - 0s 46ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 25ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 20ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 26ms/step 1/1 [==============================] - 0s 25ms/step 1/1 [==============================] - 0s 63ms/step 1/1 [==============================] - 0s 31ms/step 1/1 [==============================] - 0s 38ms/step 1/1 [==============================] - 0s 21ms/step 1/1 
[==============================] - 0s 21ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 33ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 12ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 24ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 18ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 23ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 
[==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 17ms/step 1/1 [==============================] - 0s 15ms/step 1/1 
[==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 20ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 22ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 21ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 15ms/step 1/1 [==============================] - 0s 16ms/step 1/1 [==============================] - 0s 19ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 14ms/step 1/1 [==============================] - 0s 13ms/step 1/1 [==============================] - 0s 13ms/step
# Plot the generator/discriminator loss curves recorded during training.
show_losses(losses)

# Display the first 40 real training images on a 5x8 grid.
# Pixel values are mapped from [-1, 1] back to [0, 1] for imshow
# (presumably the data was normalized to [-1, 1] for GAN training — verify upstream).
s = 0.5 * X_train[:40] + 0.5
f, ax = plt.subplots(5, 8, figsize=(16, 10))
for axis, img in zip(ax.flat, s):
    axis.imshow(img)
    axis.axis('off')
plt.show()
# Sample 40 random latent vectors and render the generator's output.
noise = np.random.normal(size=(40, latent_dim))
generated_images = generator.predict(noise)
# Map outputs from [-1, 1] to [0, 1] for display
# (assumes the generator's final activation emits values in [-1, 1] — confirm in the model).
generated_images = 0.5 * generated_images + 0.5

# Show the 40 generated images on the same 5x8 grid layout as the real samples.
f, ax = plt.subplots(5, 8, figsize=(16, 10))
for axis, img in zip(ax.flat, generated_images):
    axis.imshow(img)
    axis.axis('off')
plt.show()
2/2 [==============================] - 0s 5ms/step